Unverified commit 1bf01b73, authored by AUTOMATIC1111, committed by GitHub
Browse files

Merge pull request #11046 from akx/ded-code

Remove a bunch of unused/vestigial code
parents d06af4e5 ba70a220
Loading
Loading
Loading
Loading
+0 −7
Original line number Original line Diff line number Diff line
@@ -32,13 +32,6 @@ import piexif
import piexif.helper
import piexif.helper




def upscaler_to_index(name: str):
    """Return the index of the upscaler called *name* in shared.sd_upscalers.

    The lookup is case-insensitive.  Raises an HTTPException with status 400
    (listing the valid upscaler names) when no upscaler matches.
    """
    try:
        return [x.name.lower() for x in shared.sd_upscalers].index(name.lower())
    except ValueError as e:
        # list.index raises ValueError when the name is absent; that is the only
        # condition that should become a client-facing 400.  Catching the broad
        # Exception here would also mask programming errors (e.g. AttributeError).
        raise HTTPException(status_code=400, detail=f"Invalid upscaler, needs to be one of these: {' , '.join([x.name for x in shared.sd_upscalers])}") from e


def script_name_to_index(name, scripts):
def script_name_to_index(name, scripts):
    try:
    try:
        return [script.title().lower() for script in scripts].index(name.lower())
        return [script.title().lower() for script in scripts].index(name.lower())
+0 −4
Original line number Original line Diff line number Diff line
@@ -274,10 +274,6 @@ class PromptStyleItem(BaseModel):
    prompt: Optional[str] = Field(title="Prompt")
    prompt: Optional[str] = Field(title="Prompt")
    negative_prompt: Optional[str] = Field(title="Negative Prompt")
    negative_prompt: Optional[str] = Field(title="Negative Prompt")


class ArtistItem(BaseModel):
    # Pydantic model describing one entry of the artists list returned by the API.
    name: str = Field(title="Name")  # artist display name
    score: float = Field(title="Score")  # relevance/similarity score for this artist
    category: str = Field(title="Category")  # grouping category the artist belongs to


class EmbeddingItem(BaseModel):
class EmbeddingItem(BaseModel):
    step: Optional[int] = Field(title="Step", description="The number of steps that were used to train this embedding, if available")
    step: Optional[int] = Field(title="Step", description="The number of steps that were used to train this embedding, if available")
+0 −4
Original line number Original line Diff line number Diff line
@@ -15,7 +15,6 @@ model_dir = "Codeformer"
model_path = os.path.join(models_path, model_dir)
model_path = os.path.join(models_path, model_dir)
model_url = 'https://github.com/sczhou/CodeFormer/releases/download/v0.1.0/codeformer.pth'
model_url = 'https://github.com/sczhou/CodeFormer/releases/download/v0.1.0/codeformer.pth'


have_codeformer = False
codeformer = None
codeformer = None




@@ -123,9 +122,6 @@ def setup_model(dirname):


                return restored_img
                return restored_img


        global have_codeformer
        have_codeformer = True

        global codeformer
        global codeformer
        codeformer = FaceRestorerCodeFormer(dirname)
        codeformer = FaceRestorerCodeFormer(dirname)
        shared.face_restorers.append(codeformer)
        shared.face_restorers.append(codeformer)
+0 −7
Original line number Original line Diff line number Diff line
@@ -15,13 +15,6 @@ def has_mps() -> bool:
    else:
    else:
        return mac_specific.has_mps
        return mac_specific.has_mps


def extract_device_id(args, name):
    """Return the value following the first element of *args* that contains *name*.

    Scans the command-line argument list for an element containing the
    substring *name* (e.g. "--device-id") and returns the next element,
    which is taken to be its value.

    Returns None when no element matches, or when the match is the final
    element so no value follows it (the original indexed args[x + 1]
    unconditionally and raised IndexError in that case).
    """
    for i, arg in enumerate(args):
        # Guard the lookahead: a flag at the very end of the list has no value.
        if name in arg and i + 1 < len(args):
            return args[i + 1]

    return None



def get_cuda_device_string():
def get_cuda_device_string():
    from modules import shared
    from modules import shared
+0 −29
Original line number Original line Diff line number Diff line
@@ -174,31 +174,6 @@ def send_image_and_dimensions(x):
    return img, w, h
    return img, w, h





def find_hypernetwork_key(hypernet_name, hypernet_hash=None):
    """Resolve the hypernetwork config key matching the infotext parameters.

    Infotexts may carry both a name ("Hypernet: ke-ta") and a hash
    ("Hypernet hash: 1234abcd").  When a hash is supplied, the key whose
    embedded hash matches is chosen, e.g. "ke-ta-10000(1234abcd)".  Without
    a hash, the first key whose lowercase form starts with the given name
    is chosen instead.  Returns None when nothing matches.
    """
    wanted_name = hypernet_name.lower()

    if hypernet_hash is None:
        # No hash supplied: fall back to a case-insensitive name-prefix match.
        for key in shared.hypernetworks.keys():
            if key.lower().startswith(wanted_name):
                return key
        return None

    # Hash supplied: pick the key whose "(hash)" suffix matches it exactly.
    for key in shared.hypernetworks.keys():
        match = re_hypernet_hash.search(key)
        if match and match[1] == hypernet_hash:
            return key

    return None


def restore_old_hires_fix_params(res):
def restore_old_hires_fix_params(res):
    """for infotexts that specify old First pass size parameter, convert it into
    """for infotexts that specify old First pass size parameter, convert it into
    width, height, and hr scale"""
    width, height, and hr scale"""
@@ -332,10 +307,6 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
    return res
    return res




settings_map = {}



infotext_to_setting_name_mapping = [
infotext_to_setting_name_mapping = [
    ('Clip skip', 'CLIP_stop_at_last_layers', ),
    ('Clip skip', 'CLIP_stop_at_last_layers', ),
    ('Conditional mask weight', 'inpainting_mask_weight'),
    ('Conditional mask weight', 'inpainting_mask_weight'),
Loading