Unverified Commit ce72af87 authored by AUTOMATIC1111's avatar AUTOMATIC1111 Committed by GitHub
Browse files

Merge pull request #7199 from maxaudron/feature/configurable-data-dir

Add flag to store user data separate from source code
parents 0834d4ce 23a9d5e2
Loading
Loading
Loading
Loading
+1 −1
Original line number Original line Diff line number Diff line
@@ -8,7 +8,7 @@ import torch
import modules.face_restoration
import modules.face_restoration
import modules.shared
import modules.shared
from modules import shared, devices, modelloader
from modules import shared, devices, modelloader
from modules.paths import script_path, models_path
from modules.paths import models_path


# codeformer people made a choice to include modified basicsr library to their project which makes
# codeformer people made a choice to include modified basicsr library to their project which makes
# it utterly impossible to use it alongside with other libraries that also use basicsr, like GFPGAN.
# it utterly impossible to use it alongside with other libraries that also use basicsr, like GFPGAN.
+3 −1
Original line number Original line Diff line number Diff line
@@ -7,9 +7,11 @@ import git
from modules import paths, shared
from modules import paths, shared


extensions = []
extensions = []
extensions_dir = os.path.join(paths.script_path, "extensions")
extensions_dir = os.path.join(paths.data_path, "extensions")
extensions_builtin_dir = os.path.join(paths.script_path, "extensions-builtin")
extensions_builtin_dir = os.path.join(paths.script_path, "extensions-builtin")


if not os.path.exists(extensions_dir):
    os.makedirs(extensions_dir)


def active():
def active():
    return [x for x in extensions if x.enabled]
    return [x for x in extensions if x.enabled]
+2 −2
Original line number Original line Diff line number Diff line
@@ -6,7 +6,7 @@ import re
from pathlib import Path
from pathlib import Path


import gradio as gr
import gradio as gr
from modules.shared import script_path
from modules.paths import data_path
from modules import shared, ui_tempdir, script_callbacks
from modules import shared, ui_tempdir, script_callbacks
import tempfile
import tempfile
from PIL import Image
from PIL import Image
@@ -289,7 +289,7 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
def connect_paste(button, paste_fields, input_comp, jsfunc=None):
def connect_paste(button, paste_fields, input_comp, jsfunc=None):
    def paste_func(prompt):
    def paste_func(prompt):
        if not prompt and not shared.cmd_opts.hide_ui_dir_config:
        if not prompt and not shared.cmd_opts.hide_ui_dir_config:
            filename = os.path.join(script_path, "params.txt")
            filename = os.path.join(data_path, "params.txt")
            if os.path.exists(filename):
            if os.path.exists(filename):
                with open(filename, "r", encoding="utf8") as file:
                with open(filename, "r", encoding="utf8") as file:
                    prompt = file.read()
                    prompt = file.read()
+2 −3
Original line number Original line Diff line number Diff line
@@ -6,12 +6,11 @@ import facexlib
import gfpgan
import gfpgan


import modules.face_restoration
import modules.face_restoration
from modules import shared, devices, modelloader
from modules import paths, shared, devices, modelloader
from modules.paths import models_path


model_dir = "GFPGAN"
model_dir = "GFPGAN"
user_path = None
user_path = None
model_path = os.path.join(models_path, model_dir)
model_path = os.path.join(paths.models_path, model_dir)
model_url = "https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth"
model_url = "https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth"
have_gfpgan = False
have_gfpgan = False
loaded_gfpgan_model = None
loaded_gfpgan_model = None
+3 −1
Original line number Original line Diff line number Diff line
@@ -4,8 +4,10 @@ import os.path


import filelock
import filelock


from modules.paths import data_path


cache_filename = "cache.json"

cache_filename = os.path.join(data_path, "cache.json")
cache_data = None
cache_data = None




Loading