Compare commits: d968795628...fd51a0625f
3 Commits

| SHA1 |
|---|
| fd51a0625f |
| 2162cf78cd |
| 8036dc33b3 |
generate
@@ -1,10 +1,10 @@
-#!/home/fname/Projects/OpenSource/python-mommy-venv/.venv/bin/python3
+#!.venv/bin/python3
 import requests
 from pathlib import Path
 import json
 
 
-CARGO_MOMMY_DATA = "https://raw.githubusercontent.com/diamondburned/go-mommy/refs/heads/main/responses.json"
+CARGO_MOMMY_DATA = "https://raw.githubusercontent.com/Gankra/cargo-mommy/refs/heads/main/responses.json"
 MODULE_PATH = Path("python_mommy_venv")
 
 
@@ -14,7 +14,7 @@ if __name__ == "__main__":
     res = requests.get(CARGO_MOMMY_DATA)
     if not res.ok:
         raise Exception(f"couldn't fetch {CARGO_MOMMY_DATA} ({res.status_code})")
 
     print(f"writing {Path(MODULE_PATH, 'responses.json')}")
     with Path(MODULE_PATH, "responses.json").open("w") as f:
         json.dump(res.json(), f, indent=4)
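Taken together, the two hunks above are the whole of the `generate` helper: it mirrors cargo-mommy's responses.json into the package directory. Note that the new shebang `#!.venv/bin/python3` is a relative path, which is resolved against the caller's working directory, so the helper now has to be launched from the repository root. A condensed sketch of the same flow, with `raise_for_status()` standing in for the explicit `if not res.ok` guard:

    import json
    import requests
    from pathlib import Path

    CARGO_MOMMY_DATA = "https://raw.githubusercontent.com/Gankra/cargo-mommy/refs/heads/main/responses.json"
    MODULE_PATH = Path("python_mommy_venv")

    res = requests.get(CARGO_MOMMY_DATA)
    res.raise_for_status()  # same effect as the script's `if not res.ok: raise ...`
    (MODULE_PATH / "responses.json").write_text(json.dumps(res.json(), indent=4))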
@@ -8,6 +8,7 @@ import argparse
 
 from .responses import compile_config
 from .static import IS_VENV, VENV_DIRECTORY, CONFIG_DIRECTORY, COMPILED_CONFIG_FILE_NAME
+from ntpath import devnull
 
 logging.basicConfig(
     format='%(message)s',
@@ -108,12 +109,11 @@ def write_compile_config(local: bool):
     (VENV_DIRECTORY / COMPILED_CONFIG_FILE_NAME).unlink(missing_ok=True)
 
 
-def wrap_interpreter(path: Path):
+def wrap_interpreter(path: Path, symlink_target: Path):
     mommy_logger.info("mommy found a symlink to an interpreter~ %s", str(path))
     serious_logger.info("interpreter symlink found at %s", str(path))
 
     inner_symlink = path.parent / ("inner_" + path.name)
-    symlink_target = path.resolve()
 
     if inner_symlink.exists():
         raise Exception("inner symlink somehow already exists. This shouldn't happen because of prior checks")
@@ -203,6 +203,11 @@ def mommify_venv():
 
     mommy_logger.info("mommy looks in %s to mess your system up~ <33", str(bin_path))
     serious_logger.info("scanning binary directory of venv at %s", str(bin_path))
 
+    resolved_symlinks = {}
+    for path in list(bin_path.iterdir()):
+        if path.is_symlink():
+            resolved_symlinks[path.name] = path.resolve()
+
     for path in list(bin_path.iterdir()):
         name = path.name
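The hunk above and the `wrap_interpreter` hunk before it are two halves of one change: the resolve step moves out of `wrap_interpreter` and into a first pass over `bin/` that snapshots every symlink target into `resolved_symlinks` before anything is rewritten. A plausible reading (the diff itself doesn't say) is ordering: once an entry has been replaced by a wrapper, calling `path.resolve()` on it would follow the new wrapper rather than the original interpreter. A minimal sketch of that two-pass pattern, with a hypothetical helper name:

    from pathlib import Path

    def snapshot_then_wrap(bin_path: Path) -> None:
        # Pass 1: record the real target of every symlink while bin/ is still untouched.
        resolved = {p.name: p.resolve() for p in bin_path.iterdir() if p.is_symlink()}

        # Pass 2: rewrite the entries; the pre-resolved target survives the rewrite.
        for p in bin_path.iterdir():
            if p.is_symlink() and not p.name.startswith("inner_"):
                wrap_interpreter(p, resolved[p.name])  # signature from the hunk above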
@@ -213,10 +218,10 @@ def mommify_venv():
             if name.startswith("inner_"):
                 continue
 
-            if subprocess.run([str(path), '-c', '"exit(0)"']).returncode != 0:
+            if subprocess.run([str(path), '-c', '"exit(0)"'], stdout=sys.devnull).returncode != 0:
                 continue
 
-            wrap_interpreter(path)
+            wrap_interpreter(path, resolved_symlinks[path.name])
 
         else:
             # could be pip
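Two standard-library details are worth flagging around this hunk and the `from ntpath import devnull` line added earlier: `sys` has no `devnull` attribute, and `ntpath.devnull` is just the string "nul" (the Windows null device). The portable names are `os.devnull` (a path string that still has to be opened) and `subprocess.DEVNULL` (a constant `subprocess.run` accepts directly). A small sketch of the usual way to silence an interpreter probe like the one above:

    import subprocess

    def interpreter_runs(interpreter: str) -> bool:
        # subprocess.DEVNULL discards the child's output without opening os.devnull by hand.
        probe = subprocess.run(
            [interpreter, "-c", "exit(0)"],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        return probe.returncode == 0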
@@ -17,6 +17,8 @@
                 "well done~!\n{role} is so happy for you~",
                 "what a good {affectionate_term} you are~",
                 "that's {role}'s clever little {affectionate_term}~",
+                "you're doing so well~!",
+                "you're making {role} so happy~",
                 "{role} loves {pronoun} cute little {affectionate_term}~"
             ],
             "negative": [
@@ -24,6 +26,7 @@
                 "don't forget to hydrate~",
                 "aww, you'll get it next time~",
                 "do you need {role}'s help~?",
+                "everything's gonna be ok~",
                 "{role} still loves you no matter what~",
                 "oh no did {role}'s little {affectionate_term} make a big mess~?",
                 "{role} knows {pronoun} little {affectionate_term} can do better~",
@@ -33,7 +36,26 @@
                 "oh, darling, you're almost there~",
                 "does {role}'s little {affectionate_term} need a bit of a break~?",
                 "oops~! {role} loves you anyways~",
+                "try again for {role}, {affectionate_term}~",
                 "don't worry, {role} knows you can do it~"
+            ],
+            "overflow": [
+                "{role} has executed too many times and needs to take a nap~"
+            ]
+        },
+        "ominous": {
+            "positive": [
+                "What you have set in motion today will be remembered for aeons to come!",
+                "{role} will see to it that {pronoun} little {affectionate_term}'s name is feared~",
+                "{role} is proud of the evil seed {pronoun} {affectionate_term} has planted into this accursed world"
+            ],
+            "negative": [
+                "Ah, failure? {role} will make sure the stars are right next time",
+                "Does {role}'s little {affectionate_term} need more time for worship~?",
+                "May the mark of the beast stain your flesh forever, {role} will haunt your soul forevermore"
+            ],
+            "overflow": [
+                "THOU HAST DRUNK TOO DEEPLY OF THE FONT"
             ]
         },
         "thirsty": {
@@ -46,10 +68,11 @@
                 "*pats your butt*\nthat's a good {affectionate_term}~",
                 "*drags {pronoun} nail along your cheek*\nsuch a good {affectionate_term}~",
                 "*bites {pronoun} lip*\nmhmm~",
-                "give {pronoun} a kiss~",
+                "give {role} a kiss~",
                 "*heavy breathing against your neck*"
             ],
             "negative": [
+                "you're so cute when you're flustered~",
                 "do you think you're going to get a reward from {role} like that~?",
                 "*grabs your hair and pulls your head back*\nyou can do better than that for {role} can't you~?",
                 "if you don't learn how to code better, {role} is going to put you in time-out~",
@@ -58,6 +81,9 @@
                 "gosh you must be flustered~",
                 "are you just keysmashing now~?\ncute~",
                 "is {role}'s little {affectionate_term} having trouble reaching the keyboard~?"
+            ],
+            "overflow": [
+                "you've been a bad little {affectionate_term} and worn out {role}~"
             ]
         },
         "yikes": {
@@ -71,7 +97,10 @@
                 "{role} is getting hot~",
                 "that's a good {denigrating_term}~",
                 "yes~\nyes~~\nyes~~~",
-                "{role}'s going to keep {pronoun} good little {denigrating_term}~"
+                "{role}'s going to keep {pronoun} good little {denigrating_term}~",
+                "open wide {denigrating_term}.\nyou've earned {role}'s {part}~",
+                "do you want {role}'s {part}?\nkeep this up and you'll earn it~",
+                "oooh~ what a good {denigrating_term} you are~"
             ],
             "negative": [
                 "you filthy {denigrating_term}~\nyou made a mess, now clean it up~\nwith your tongue~",
@@ -83,7 +112,14 @@
                 "{role} doesn't think {pronoun} little {denigrating_term} should have permission to wear clothes anymore~",
                 "never forget you belong to {role}~",
                 "does {role} need to put you in the {denigrating_term} wiggler~?",
-                "{role} is starting to wonder if you should just give up and become {pronoun} breeding stock~"
+                "{role} is starting to wonder if you should just give up and become {pronoun} breeding stock~",
+                "on your knees {denigrating_term}~",
+                "oh dear. {role} is not pleased",
+                "one spank per error sounds appropriate, don't you think {denigrating_term}?",
+                "no more {part} for you {denigrating_term}"
+            ],
+            "overflow": [
+                "brats like you don't get to talk to {role}"
             ]
         }
     },
@@ -108,9 +144,7 @@
             ]
         },
         "role": {
-            "defaults": [
-                "mommy"
-            ]
+            "defaults": []
         },
         "affectionate_term": {
             "defaults": [
@@ -14,7 +14,7 @@ serious_logger = logging.getLogger("serious")
 
 PREFIX = "MOMMY"
 
-RESPONSES_URL = "https://raw.githubusercontent.com/diamondburned/go-mommy/refs/heads/main/responses.json"
+RESPONSES_URL = "https://raw.githubusercontent.com/Gankra/cargo-mommy/refs/heads/main/responses.json"
 RESPONSES_FILE = Path(__file__).parent / "responses.json"
 ADDITIONAL_ENV_VARS = {
     "pronoun": "PRONOUNS",
@@ -28,7 +28,7 @@ ADDITIONAL_ENV_VARS = {
 def _load_config_file(config_file: Path) -> Dict[str, List[str]]:
     with config_file.open("r") as f:
         data = toml.load(f)
 
     result = {}
     for key, value in data.items():
         if isinstance(value, str):
@@ -43,7 +43,7 @@ ADDITIONAL_PROGRAM_PREFIXES = [
     "cargo", # only as fallback if user already configured cargo
 ]
 
 def _get_env_var_names(name: str):
     BASE = PREFIX + "_" + name.upper()
     yield "PYTHON_" + BASE
     yield BASE
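`_get_env_var_names` encodes the lookup precedence for every configurable value: the `PYTHON_`-prefixed variable is yielded before the bare `MOMMY_` one, so code that walks the names in order (as `_get_env_value` in the next hunk does) lets the Python-specific variable override a name shared with cargo-mommy. A tiny illustration of that precedence; the values are made up:

    import os

    os.environ["MOMMY_ROLE"] = "mommy"             # name shared with cargo-mommy
    os.environ["PYTHON_MOMMY_ROLE"] = "caretaker"  # python-mommy-specific name

    # _get_env_var_names("role") yields PYTHON_MOMMY_ROLE first, then MOMMY_ROLE,
    # so a first-match lookup returns "caretaker" here.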
@@ -56,23 +56,27 @@ def _get_env_value(name: str) -> Optional[str]:
         val = os.environ.get(key)
         if val is not None:
             return val
 
     for key in _get_env_var_names(name):
         val = os.environ.get(key)
         if val is not None:
             return val
 
 
 def compile_config(disable_requests: bool = False) -> dict:
     global RESPONSES_FILE, RESPONSES_URL
 
     data = json.loads(RESPONSES_FILE.read_text())
 
     if not disable_requests:
         mommy_logger.info("mommy downloads newest responses for her girl~ %s", RESPONSES_URL)
         serious_logger.info("downloading cargo mommy responses: %s", RESPONSES_URL)
-        r = requests.get(RESPONSES_URL)
-        data = r.json()
+        try:
+            r = requests.get(RESPONSES_URL)
+            data = r.json()
+        except requests.exceptions.ConnectionError:
+            mommy_logger.info("mommy couldn't fetch the url~")
+            serious_logger.info("couldnt fetch the url")
 
     config_definition: Dict[str, dict] = data["vars"]
     mood_definitions: Dict[str, dict] = data["moods"]
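The final hunk changes `compile_config`'s refresh step from an unconditional `requests.get` into a try/except: the bundled `responses.json` is loaded first, and the downloaded copy only replaces it when the fetch succeeds, so a `ConnectionError` now degrades to the shipped data instead of crashing. A rough sketch of that fallback shape, using a hypothetical helper name but only behaviour visible in the hunk:

    import json
    import requests
    from pathlib import Path

    def load_responses(responses_file: Path, responses_url: str, disable_requests: bool = False) -> dict:
        # Always start from the copy shipped with the package.
        data = json.loads(responses_file.read_text())

        if not disable_requests:
            try:
                r = requests.get(responses_url)
                data = r.json()  # replace the bundled data only on a successful fetch
            except requests.exceptions.ConnectionError:
                pass  # offline: keep the bundled data

        return data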