
Clean out unused scripts

This commit is contained in:
Evan Pratten 2024-05-16 09:46:16 -04:00
parent be6e2c868b
commit 8d872a499c
8 changed files with 0 additions and 511 deletions

@@ -1,37 +0,0 @@
#! /usr/bin/env python

import argparse
import sys
import logging
import subprocess

logger = logging.getLogger(__name__)


def main() -> int:
    # Handle program arguments
    ap = argparse.ArgumentParser(prog="basejump")
    ap.add_argument(
        "subcommand", help="The subcommand to run", choices=["init", "fetch", "discover"]
    )
    ap.add_argument("arguments", nargs=argparse.REMAINDER)
    args = ap.parse_args()

    # Configure logging
    logging.basicConfig(
        level=logging.INFO,
        format="%(levelname)s: %(message)s",
    )

    # Execute the appropriate subcommand
    real_command_name = f"basejump-{args.subcommand}"
    try:
        return subprocess.run([real_command_name] + args.arguments).returncode
    except FileNotFoundError:
        logger.error(f"Unknown subcommand: {args.subcommand}")
        logger.error(f"Could not find `{real_command_name}` in $PATH")
        return 1


if __name__ == "__main__":
    sys.exit(main())

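For context: the dispatcher above resolves subcommands git-style, so "basejump X" simply runs "basejump-X" from $PATH with the remaining arguments. A usage sketch, assuming the basejump-* scripts are installed on $PATH and using a made-up codebase name "work":

basejump init work              # dispatches to basejump-init work
basejump discover ~/src/work    # dispatches to basejump-discover ~/src/work
basejump fetch work --pull      # dispatches to basejump-fetch work --pull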
@@ -1,59 +0,0 @@
#! /usr/bin/env python

import argparse
import sys
import logging
import subprocess
import json
from pprint import pprint
from pathlib import Path

logger = logging.getLogger(__name__)


def main() -> int:
    # Handle program arguments
    ap = argparse.ArgumentParser(
        prog="basejump discover", description="Discover repos in a codebase"
    )
    ap.add_argument("root_path", help="The root path of the codebase", type=Path)
    ap.add_argument(
        "-v", "--verbose", help="Enable verbose logging", action="store_true"
    )
    args = ap.parse_args()

    # Configure logging
    logging.basicConfig(
        level=logging.DEBUG if args.verbose else logging.INFO,
        format="%(levelname)s: %(message)s",
    )

    # Find all git repos in the codebase
    logger.info(f"Searching for git repos in: {args.root_path}")
    repos = []
    for path in args.root_path.rglob(".git"):
        repos.append({"path": str(path.parent.absolute())})

    # For each repo, find the upstream
    logger.info("Finding upstream URLs...")
    for repo in repos:
        # Get the upstream URL
        upstream_url = subprocess.run(
            ["git", "remote", "get-url", "origin"],
            cwd=repo["path"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            encoding="utf-8",
        ).stdout.strip()

        # Add the upstream URL to the repo config
        repo["upstream"] = upstream_url

    # Print the results
    logger.info("Found the following repos:")
    print(json.dumps(repos, indent=4))

    return 0


if __name__ == "__main__":
    sys.exit(main())

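The discover script logs progress to stderr and prints the repo list as JSON on stdout, in the same shape as the "repos" array the other basejump scripts read. One way it might be combined with jq to seed a codebase config (the name "work" and the path are hypothetical):

basejump discover ~/src/work | jq '{name: "work", repos: .}' \
    > ~/.config/basejump/work.codebase.json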
@@ -1,58 +0,0 @@
#! /usr/bin/env python

import argparse
import sys
import logging
import json
import subprocess
import os
from pathlib import Path

logger = logging.getLogger(__name__)


def main() -> int:
    # Handle program arguments
    ap = argparse.ArgumentParser(
        prog="basejump fetch", description="Fetches all changes for a whole codebase"
    )
    ap.add_argument("name", help="The name of the codebase")
    ap.add_argument("--pull", help="Perform a full pull", action="store_true")
    ap.add_argument(
        "-v", "--verbose", help="Enable verbose logging", action="store_true"
    )
    args = ap.parse_args()

    # Configure logging
    logging.basicConfig(
        level=logging.DEBUG if args.verbose else logging.INFO,
        format="%(levelname)s: %(message)s",
    )

    # Ensure that the basejump config dir exists
    bj_config_dir = Path.home() / ".config" / "basejump"
    bj_config_dir.mkdir(parents=True, exist_ok=True)

    # Read the codebase config
    codebase_config_path = bj_config_dir / f"{args.name}.codebase.json"
    if not codebase_config_path.exists():
        logger.error(f"Codebase `{args.name}` does not exist")
        return 1
    config = json.loads(codebase_config_path.read_text())

    # Handle each repository
    for repo in config["repos"]:
        logger.info(f"Fetching {repo['path']}")

        # If we are in pull mode, do a git pull
        if args.pull:
            subprocess.run(["git", "pull"], cwd=repo["path"])
        # Otherwise fetch all
        else:
            subprocess.run(["git", "fetch", "--all"], cwd=repo["path"])

    return 0


if __name__ == "__main__":
    sys.exit(main())

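The fetch script expects ~/.config/basejump/<name>.codebase.json to already exist. A minimal sketch of creating one by hand and running a fetch; the shape mirrors the template that basejump-init writes below, and the path is a placeholder:

mkdir -p ~/.config/basejump
cat > ~/.config/basejump/work.codebase.json <<'EOF'
{
    "name": "work",
    "repos": [
        {
            "path": "/home/user/src/work/api",
            "upstream": "https://github.com/octocat/Hello-World"
        }
    ]
}
EOF

basejump fetch work          # git fetch --all in every listed repo
basejump fetch work --pull   # git pull instead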
@@ -1,75 +0,0 @@
#! /usr/bin/env python

import argparse
import sys
import logging
import json
import subprocess
import os
from pathlib import Path

logger = logging.getLogger(__name__)


def main() -> int:
    # Handle program arguments
    ap = argparse.ArgumentParser(
        prog="basejump init", description="Creates a new basejump codebase"
    )
    ap.add_argument("name", help="The name of the codebase")
    ap.add_argument(
        "-v", "--verbose", help="Enable verbose logging", action="store_true"
    )
    args = ap.parse_args()

    # Configure logging
    logging.basicConfig(
        level=logging.DEBUG if args.verbose else logging.INFO,
        format="%(levelname)s: %(message)s",
    )

    # Ensure that the basejump config dir exists
    bj_config_dir = Path.home() / ".config" / "basejump"
    bj_config_dir.mkdir(parents=True, exist_ok=True)

    # Create a new codebase definition
    codebase_config_path = bj_config_dir / f"{args.name}.codebase.json"

    # If the path already exists, abort
    if codebase_config_path.exists():
        logger.error(f"Codebase `{args.name}` already exists")
        logger.info(f"Config file at: {codebase_config_path}")
        return 1

    # Create a template codebase config
    template_config = {
        "name": args.name,
        "repos": [
            {
                "path": "/tmp/example",
                "upstream": "https://github.com/octocat/Hello-World",
            }
        ],
    }

    # Write the template config to disk
    codebase_config_path.write_text(json.dumps(template_config, indent=4))

    # Open $EDITOR (or vim) to edit the config
    subprocess.run([os.environ.get("EDITOR", "vim"), str(codebase_config_path)])

    # Iterate through every repo and clone it
    config = json.loads(codebase_config_path.read_text())
    for repo in config["repos"]:
        if Path(repo["path"]).exists():
            logger.info(f"Skipping {repo['path']}, already exists")
            continue

        # Do a clone
        logger.info(f"Cloning {repo['upstream']} into {repo['path']}")
        subprocess.run(["git", "clone", repo["upstream"], repo["path"]])

    return 0


if __name__ == "__main__":
    sys.exit(main())

@@ -1,217 +0,0 @@
#! /usr/bin/env python3

import argparse
import sys
import logging
import requests
import socket
import urllib.parse
from pathlib import Path

logger = logging.getLogger(__name__)

G_CLIENT_ID = "107923498573-ruh1uhkfe1t5f18vam6sckq7pqer1vmg.apps.googleusercontent.com"
G_SCOPES = ["https://www.googleapis.com/auth/photoslibrary.appendonly"]
G_REDIRECT_URI = "http://localhost:7842"


def get_google_oauth_token() -> str:
    """Either log the user in, or use a stored refresh token to get an OAuth token"""
    refresh_token_path = Path("~/.config/gp-upload/refresh-token").expanduser()
    client_secret_path = Path("~/.config/gp-upload/client-secret").expanduser()

    # Read the client secret
    with client_secret_path.open("r") as f:
        client_secret = f.read().strip()

    # Check if we have a refresh token
    if refresh_token_path.exists():
        logger.info("Using stored refresh token")

        # Read the refresh token
        with refresh_token_path.open("r") as f:
            refresh_token = f.read().strip()

        # Make the request
        response = requests.post(
            "https://oauth2.googleapis.com/token",
            data={
                "client_id": G_CLIENT_ID,
                "grant_type": "refresh_token",
                "refresh_token": refresh_token,
                "client_secret": client_secret,
            },
        )

        # Check for errors
        if response.status_code != 200:
            logger.error("Failed to get OAuth token")
            logger.error(response.text)
            return None

        # Return the OAuth token
        return response.json()["access_token"]

    # Otherwise, log the user in
    else:
        logger.info("Logging user in")

        # Direct the user to Google's login page
        logger.info("Please visit the following URL to log in:")
        logger.info(
            f"https://accounts.google.com/o/oauth2/v2/auth?client_id={G_CLIENT_ID}&response_type=code&scope={'+'.join(G_SCOPES)}&redirect_uri={G_REDIRECT_URI}&access_type=offline&prompt=consent"
        )

        # Open a TCP server to listen for the redirect
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.bind(("localhost", 7842))
            s.listen()

            # Wait for the redirect
            conn, addr = s.accept()
            with conn:
                # Read the request
                request = conn.recv(1024).decode("utf-8")

                # Parse the request
                request = request.splitlines()
                request = [line for line in request if line.startswith("GET")]
                request = request[0].split(" ")[1]
                request = request.split("?")[1]
                request = request.split("&")
                request = {key: urllib.parse.unquote(value) for key, value in [pair.split("=") for pair in request]}

                # Check for errors
                if "error" in request:
                    logger.error(f"Failed to log in: {request['error']}")
                    conn.sendall(b"HTTP/1.1 500 Internal Server Error\n\n<html><body><h1>Failed to log in</h1></body></html>")
                    conn.close()
                    return None

                # Return a message to the user and close the socket
                conn.sendall(b"HTTP/1.1 200 OK\n\n<html><body><h1>Success!</h1></body></html>")
                conn.close()

        # Make the request
        response = requests.post(
            "https://oauth2.googleapis.com/token",
            data={
                "client_id": G_CLIENT_ID,
                "code": request["code"],
                "grant_type": "authorization_code",
                "redirect_uri": G_REDIRECT_URI,
                "client_secret": client_secret,
            },
        )
        logger.info(f"Response: {response.text}")

        # Check for errors
        if response.status_code != 200:
            logger.error("Failed to get OAuth token")
            logger.error(response.text)
            return None

        access_token = response.json()["access_token"]
        refresh_token = response.json()["refresh_token"]

        # Save the refresh token
        refresh_token_path.parent.mkdir(parents=True, exist_ok=True)
        with refresh_token_path.open("w") as f:
            f.write(refresh_token)

        # Return the OAuth token
        return access_token


def upload_file(file: Path, oauth_token: str):
    # Read the file
    with file.open("rb") as f:
        file_data = f.read()

    # Make the upload request
    logger.info("Creating new upload")
    response = requests.post(
        "https://photoslibrary.googleapis.com/v1/uploads",
        headers={
            "Authorization": f"Bearer {oauth_token}",
            "Content-type": "application/octet-stream",
            "X-Goog-Upload-File-Name": file.name,
            "X-Goog-Upload-Protocol": "raw",
        },
        data=file_data,
    )
    logger.info(f"Uploaded {file.stat().st_size} bytes")

    # Check for errors
    if response.status_code != 200:
        logger.error(f"Failed to upload: {file}")
        logger.error(response.text)
        return None

    # Get the upload token
    upload_token = response.text
    logger.info(f"Upload token: {upload_token}")

    # Create the media item
    logger.info("Creating new media item")
    response = requests.post(
        "https://photoslibrary.googleapis.com/v1/mediaItems:batchCreate",
        headers={
            "Authorization": f"Bearer {oauth_token}",
            "Content-type": "application/json",
        },
        json={
            "newMediaItems": [
                {
                    "description": "",
                    "simpleMediaItem": {
                        "fileName": file.name,
                        "uploadToken": upload_token,
                    },
                }
            ]
        },
    )

    # Check for errors
    if response.status_code != 200:
        logger.error(f"Failed to create media item: {file}")
        logger.error(response.text)
        return None

    # Log some info about the action
    for new_item in response.json()["newMediaItemResults"]:
        if "mediaItem" in new_item:
            logger.info(f"Created media item: {new_item['mediaItem']['filename']}")
            logger.info(f"URL: {new_item['mediaItem']['productUrl']}")


def main() -> int:
    # Handle program arguments
    ap = argparse.ArgumentParser(
        prog="gp-upload", description="Upload a file to Google Photos"
    )
    ap.add_argument("file", help="File to upload")
    ap.add_argument(
        "-v", "--verbose", help="Enable verbose logging", action="store_true"
    )
    args = ap.parse_args()

    # Configure logging
    logging.basicConfig(
        level=logging.DEBUG if args.verbose else logging.INFO,
        format="%(levelname)s: %(message)s",
    )

    # Authenticate
    oauth_token = get_google_oauth_token()
    if oauth_token is None:
        return 1

    # Upload
    upload_file(Path(args.file), oauth_token)
    return 0


if __name__ == "__main__":
    sys.exit(main())

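The uploader reads an OAuth client secret from ~/.config/gp-upload/client-secret and caches a refresh token beside it after the first interactive login. A rough setup sketch, assuming the script is installed as gp-upload (its argparse prog name) and using a placeholder secret value:

# One-time setup for the client ID baked into the script
mkdir -p ~/.config/gp-upload
printf '%s\n' 'replace-with-oauth-client-secret' > ~/.config/gp-upload/client-secret
chmod 600 ~/.config/gp-upload/client-secret

# The first run prints a Google login URL and listens on http://localhost:7842 for
# the redirect; later runs reuse ~/.config/gp-upload/refresh-token.
gp-upload -v photo.jpg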
@@ -1,9 +0,0 @@
#! /bin/sh
set -ex

# Download the appimage to .local/bin
mkdir -p ~/.local/bin
wget https://github.com/neovim/neovim/releases/download/stable/nvim.appimage -O ~/.local/bin/nvim

# Make it executable
chmod +x ~/.local/bin/nvim

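The AppImage lands in ~/.local/bin, which only helps if that directory is on PATH. A hedged snippet for a POSIX shell profile (e.g. ~/.profile), in case it is not already handled elsewhere:

# Prepend ~/.local/bin to PATH if it is not already present
case ":$PATH:" in
    *":$HOME/.local/bin:"*) ;;
    *) PATH="$HOME/.local/bin:$PATH" ;;
esac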
@@ -1,39 +0,0 @@
#! /usr/bin/env bash
set -e

echo "You have chosen to install neovim from source."

# If ~/src/neovim doesn't exist, clone a fresh copy
cd ~/src
if [ ! -d ~/src/neovim ]; then
    git clone https://github.com/neovim/neovim
fi
cd neovim

# Handle branch checkout
echo "Do you want to switch to the stable branch? (y/n)"
read -r -n 1 response
if [[ $response =~ ^([yY][eE][sS]|[yY])$ ]]; then
    git checkout stable
fi

# Figure out the appropriate make command.
if [ -x "$(command -v gmake)" ]; then
    MAKE_CMD=gmake
else
    MAKE_CMD=make
fi

# Determine the install prefix
NVIM_INSTALL_PREFIX=${NVIM_INSTALL_PREFIX:-$HOME/.local}

# Build
echo "Building neovim..."
$MAKE_CMD CMAKE_BUILD_TYPE=Release CMAKE_EXTRA_FLAGS="-DCMAKE_INSTALL_PREFIX=$NVIM_INSTALL_PREFIX"

# Install
echo "Would you like to install neovim? (y/n)"
read -r -n 1 response
if [[ $response =~ ^([yY][eE][sS]|[yY])$ ]]; then
    $MAKE_CMD install
fi

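The build script honours NVIM_INSTALL_PREFIX (default ~/.local) and cd's into ~/src before cloning, so that directory has to exist. An invocation sketch, assuming the script is saved as build-neovim:

mkdir -p ~/src                                   # the script cd's into ~/src unconditionally
NVIM_INSTALL_PREFIX=/opt/neovim ./build-neovim   # build and install under /opt/neovim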
@@ -1,17 +0,0 @@
#! /bin/sh
set -e

# Require a directory, a key, and a comment
if [ $# -ne 3 ]; then
    echo "Usage: scp-make-upload-acl <directory> <key> <comment>"
    exit 1
fi

# Require the directory to exist
if [ ! -d "$1" ]; then
    echo "Directory $1 does not exist"
    exit 1
fi

# If all is ok, then print out the authorized_keys line that restricts that key to that directory
echo "command=\"scp -t $1\",no-agent-forwarding,no-port-forwarding,no-pty,no-user-rc,no-X11-forwarding $2 $3"
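The script only prints the authorized_keys entry; installing it is left to the caller. A sketch of how the output might be used, with a placeholder key, account, and paths:

# Append a restricted entry for an "upload" account
scp-make-upload-acl /srv/uploads "ssh-ed25519 AAAAC3...placeholder" "backup-box" \
    >> /home/upload/.ssh/authorized_keys

# The matching private key can then only copy files into /srv/uploads
# (newer OpenSSH clients may need -O to force the legacy scp protocol).
scp -O backup.tar.gz upload@server:/srv/uploads/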