diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f4f5948
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+/secrets
+/configs/remmina
+__pycache__
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..d6e33fc
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,35 @@
+[submodule "configs/nvim/pack/tpope/start/repeat"]
+ path = configs/nvim/pack/tpope/start/repeat
+ url = https://tpope.io/vim/repeat.git
+ ignore = dirty
+[submodule "configs/nvim/pack/ggandor/start/leap.nvim"]
+ path = configs/nvim/pack/ggandor/start/leap.nvim
+ url = https://github.com/ggandor/leap.nvim
+ ignore = dirty
+[submodule "configs/nvim/pack/tpope/start/commentary"]
+ path = configs/nvim/pack/tpope/start/commentary
+ url = https://tpope.io/vim/commentary.git
+ ignore = dirty
+[submodule "configs/nvim/pack/jiangmiao/start/auto-pairs"]
+ path = configs/nvim/pack/jiangmiao/start/auto-pairs
+ url = https://github.com/jiangmiao/auto-pairs
+ ignore = dirty
+[submodule "configs/nvim/pack/airblade/start/vim-gitgutter"]
+ path = configs/nvim/pack/airblade/start/vim-gitgutter
+ url = https://github.com/airblade/vim-gitgutter.git
+ ignore = dirty
+[submodule "configs/nvim/third_party/usda-syntax"]
+ path = configs/nvim/third_party/usda-syntax
+ url = https://github.com/superfunc/usda-syntax
+[submodule "third_party/ufw-application-profiles"]
+ path = third_party/ufw-application-profiles
+ url = https://github.com/ageis/ufw-application-profiles
+[submodule "configs/nvim/pack/smolck/start/command-completion.nvim"]
+ path = configs/nvim/pack/smolck/start/command-completion.nvim
+ url = https://github.com/smolck/command-completion.nvim
+[submodule "configs/nvim/pack/echasnovski/start/mini.trailspace"]
+ path = configs/nvim/pack/echasnovski/start/mini.trailspace
+ url = https://github.com/echasnovski/mini.trailspace
+[submodule "configs/nvim/pack/neovim/start/nvim-lspconfig"]
+ path = configs/nvim/pack/neovim/start/nvim-lspconfig
+ url = https://github.com/neovim/nvim-lspconfig
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..4877f66
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,15 @@
+{
+ "files.associations": {
+ "*.json.liquid": "json",
+ "*.yaml.liquid": "yaml",
+ "*.md.liquid": "markdown",
+ "*.js.liquid": "liquid-javascript",
+ "*.css.liquid": "liquid-css",
+ "*.scss.liquid": "liquid-scss"
+ },
+ // Add python_modules to the python path
+ "python.analysis.extraPaths": [
+ "./python_modules"
+ ],
+ "python.formatting.provider": "black",
+}
\ No newline at end of file
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 0000000..0b5a47e
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,16 @@
+{
+ // See https://go.microsoft.com/fwlink/?LinkId=733558
+ // for the documentation about the tasks.json format
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "Run install script",
+ "type": "shell",
+ "command": "sh",
+ "args": [
+ "./install-linux.sh"
+ ],
+ "problemMatcher": []
+ }
+ ]
+}
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..f288702
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..c459a12
--- /dev/null
+++ b/README.md
@@ -0,0 +1,27 @@
+# *ew*pratten's *config* files
+
+This repository stores most of my common config files. It is designed to be deployable to pretty much any system. Assuming ideal conditions, any machine is one `sh ./install-` away from behaving like my personal workstation.
+
+*I know it's called ew**config**, but at this point, it's more of a monorepo of scripts*
+
+## Setup
+
+The scripts in this repository have the following dependencies:
+
+- Git (optional, extremely recommended)
+- ZSH (optional, recommended)
+- Neovim (optional, recommended)
+
+Install and link everything with:
+
+```sh
+mkdir -p ~/.config && cd ~/.config
+git clone https://github.com/ewpratten/ewconfig
+cd ewconfig
+
+# Linux & BSD & probably MacOS (untested)
+sh ./install-linux.sh
+
+# Windows, with GIT BASH
+sh ./install-windows.sh
+```
diff --git a/configs/Code/User/snippets/Unicode.code-snippets b/configs/Code/User/snippets/Unicode.code-snippets
new file mode 100644
index 0000000..cde4a05
--- /dev/null
+++ b/configs/Code/User/snippets/Unicode.code-snippets
@@ -0,0 +1,199 @@
+{
+ // Symbols
+ "Unicode: PI": {
+ "prefix": "uni_pi",
+ "body": "π"
+ },
+ "Unicode: Infinity": {
+ "prefix": "uni_infinity",
+ "body": "∞"
+ },
+ "Unicode: Square Root": {
+ "prefix": "uni_sqrt",
+ "body": "√"
+ },
+ "Unicode: Real": {
+ "prefix": "uni_real",
+ "body": "ℝ"
+ },
+ "Unicode: Natural": {
+ "prefix": "uni_natural",
+ "body": "ℕ"
+ },
+ "Unicode: Integer": {
+ "prefix": "uni_integer",
+ "body": "ℤ"
+ },
+ "Unicode: Rational": {
+ "prefix": "uni_rational",
+ "body": "ℚ"
+ },
+ "Unicode: Complex": {
+ "prefix": "uni_complex",
+ "body": "ℂ"
+ },
+ "Unicode: Plus or Minus": {
+ "prefix": "uni_plus_minus",
+ "body": "±"
+ },
+ "Unicode: Squared": {
+ "prefix": "uni_squared",
+ "body": "²"
+ },
+ "Unicode: Cubed": {
+ "prefix": "uni_cubed",
+ "body": "³"
+ },
+ "Unicode: Hypercubed": {
+ "prefix": "uni_hypercubed",
+ "body": "⁴"
+ },
+ "Unicode: Power of N": {
+ "prefix": "uni_power_n",
+ "body": "ⁿ"
+ },
+ "Unicode: Degree": {
+ "prefix": "uni_degree",
+ "body": "°"
+ },
+ "Unicode: Half": {
+ "prefix": "uni_half",
+ "body": "½"
+ },
+ "Unicode: Third": {
+ "prefix": "uni_third",
+ "body": "⅓"
+ },
+ "Unicode: Two Thirds": {
+ "prefix": "uni_two_thirds",
+ "body": "⅔"
+ },
+ "Unicode: Quarter": {
+ "prefix": "uni_quarter",
+ "body": "¼"
+ },
+ "Unicode: Three Quarters": {
+ "prefix": "uni_three_quarters",
+ "body": "¾"
+ },
+
+ // Equality
+ "Unicode: Not Equal To": {
+ "prefix": "uni_neq",
+ "body": "≠"
+ },
+ "Unicode: Approximately Equal To": {
+ "prefix": "uni_approx",
+ "body": "≈"
+ },
+ "Unicode: Greater Than or Equal To": {
+ "prefix": "uni_gte",
+ "body": "≥"
+ },
+ "Unicode: Less Than or Equal To": {
+ "prefix": "uni_lte",
+ "body": "≤"
+ },
+
+ // Set Operations
+ "Unicode: Union": {
+ "prefix": "uni_union",
+ "body": "∪"
+ },
+ "Unicode: Intersection": {
+ "prefix": "uni_intersect",
+ "body": "∩"
+ },
+ "Unicode: Element Of": {
+ "prefix": "uni_element",
+ "body": "∈"
+ },
+ "Unicode: Not Element Of": {
+ "prefix": "uni_not_element",
+ "body": "∉"
+ },
+ "Unicode: Subset Of": {
+ "prefix": "uni_subset",
+ "body": "⊂"
+ },
+ "Unicode: Superset Of": {
+ "prefix": "uni_superset",
+ "body": "⊃"
+ },
+ "Unicode: Subset Of or Equal To": {
+ "prefix": "uni_subset_eq",
+ "body": "⊆"
+ },
+ "Unicode: Superset Of or Equal To": {
+ "prefix": "uni_superset_eq",
+ "body": "⊇"
+ },
+ "Unicode: Empty Set": {
+ "prefix": "uni_empty",
+ "body": "∅"
+ },
+
+ // Vectors
+ "Unicode: Vector Multiplication": {
+ "prefix": "uni_vector_mult",
+ "body": "⋅"
+ },
+ "Unicode: Vector Cross Product": {
+ "prefix": "uni_vector_cross",
+ "body": "×"
+ },
+ "Unicode: Vector Dot Product": {
+ "prefix": "uni_vector_dot",
+ "body": "·"
+ },
+ "Unicode: Vector Diacritic": {
+ "prefix": "uni_vector",
+ "body": "$1\u20D7$2"
+ },
+ "Unicode: Unit Vector Diacritic": {
+ "prefix": "uni_hat",
+ "body": "$1\u0302$2"
+ },
+
+ // Arrows
+ "Unicode: Left Arrow": {
+ "prefix": "uni_left_arrow",
+ "body": "←"
+ },
+ "Unicode: Right Arrow": {
+ "prefix": "uni_right_arrow",
+ "body": "→"
+ },
+ "Unicode: Up Arrow": {
+ "prefix": "uni_up_arrow",
+ "body": "↑"
+ },
+ "Unicode: Down Arrow": {
+ "prefix": "uni_down_arrow",
+ "body": "↓"
+ },
+ "Unicode: Left Right Arrow": {
+ "prefix": "uni_left_right_arrow",
+ "body": "↔"
+ },
+ "Unicode: Up Down Arrow": {
+ "prefix": "uni_up_down_arrow",
+ "body": "↕"
+ },
+ "Unicode: Top Left Arrow": {
+ "prefix": "uni_top_left_arrow",
+ "body": "↖"
+ },
+ "Unicode: Top Right Arrow": {
+ "prefix": "uni_top_right_arrow",
+ "body": "↗"
+ },
+ "Unicode: Bottom Right Arrow": {
+ "prefix": "uni_bottom_right_arrow",
+ "body": "↘"
+ },
+ "Unicode: Bottom Left Arrow": {
+ "prefix": "uni_bottom_left_arrow",
+ "body": "↙"
+ },
+}
\ No newline at end of file
diff --git a/configs/Code/User/snippets/json.json b/configs/Code/User/snippets/json.json
new file mode 100644
index 0000000..7c23063
--- /dev/null
+++ b/configs/Code/User/snippets/json.json
@@ -0,0 +1,12 @@
+{
+ "Vector3": {
+ "prefix": "vector3",
+ "body": [
+ "{",
+ "\t\"x\": $1,",
+ "\t\"y\": $2,",
+ "\t\"z\": $3",
+ "}",
+ ]
+ }
+}
\ No newline at end of file
diff --git a/configs/Code/User/snippets/python.json b/configs/Code/User/snippets/python.json
new file mode 100644
index 0000000..a60028e
--- /dev/null
+++ b/configs/Code/User/snippets/python.json
@@ -0,0 +1,74 @@
+{
+ "CLI App Skeleton": {
+ "prefix": "cli_app",
+ "body": [
+ "import argparse",
+ "import sys",
+ "import logging",
+ "",
+ "logger = logging.getLogger(__name__)",
+ "",
+ "def main() -> int:",
+ "\t# Handle program arguments",
+ "\tap = argparse.ArgumentParser(prog='$1', description='$2')",
+ "\t$3",
+ "\tap.add_argument('-v', '--verbose', help='Enable verbose logging', action='store_true')"
+ "\targs = ap.parse_args()",
+ "",
+ "\t# Configure logging",
+ "\tlogging.basicConfig(",
+ "\t\tlevel=logging.DEBUG if args.verbose else logging.INFO,",
+ "\t\tformat='%(levelname)s:\t%(message)s',",
+ "\t)",
+ "",
+ "\treturn 0",
+ "",
+ "if __name__ == \"__main__\":",
+ "\tsys.exit(main())"
+ ]
+ },
+ "Request error handler": {
+ "prefix": "rerror",
+ "body": [
+ "if int($1.status_code / 100) != 2:",
+ "\treturn $2"
+ ]
+ },
+ "Path to the current file": {
+ "prefix": "__filepath__",
+ "body": "Path(__file__)"
+ },
+ "Path to the current file's parent directory": {
+ "prefix": "__filedir__",
+ "body": "Path(__file__).parent"
+ },
+ "Disable formatting for block": {
+ "prefix": "nofmt",
+ "body": [
+ "# fmt: off",
+ "$1",
+ "# fmt: on",
+ "$2"
+ ]
+ },
+ "Import Path from pathlib": {
+ "prefix": "impath",
+ "body": "from pathlib import Path"
+ },
+ "Get a logger instance": {
+ "prefix": "logger",
+ "body": "logger = logging.getLogger(${1|__name__,'werkzeug'|})"
+ },
+ "Import dataclass": {
+ "prefix":"impdataclass",
+ "body": "from dataclasses import dataclass${1:, field}"
+ },
+ "Import datetime": {
+ "prefix":"impdatetime",
+ "body": "from datetime import datetime"
+ },
+ "Import enums": {
+ "prefix":"impenum",
+ "body": "from enum import Enum${1:, auto}"
+ }
+}
\ No newline at end of file
diff --git a/configs/Code/User/snippets/rust.json b/configs/Code/User/snippets/rust.json
new file mode 100644
index 0000000..6ece974
--- /dev/null
+++ b/configs/Code/User/snippets/rust.json
@@ -0,0 +1,31 @@
+{
+ "Constructor": {
+ "prefix": "new",
+ "body": [
+ "/// Construct a new $1",
+ "pub fn new($2) -> Self {",
+ "\tSelf {",
+ "\t\t$3",
+ "\t}",
+ "}"
+ ]
+ },
+ "Derive Macro": {
+ "prefix": "derive",
+ "body": "#[derive(Debug, $1)]$2"
+ },
+ "Unit Tests": {
+ "prefix": "cfg: test",
+ "body": [
+ "#[cfg(test)]",
+ "mod tests {",
+ "\tuse super::*;",
+ "\t",
+ "\t#[test]",
+ "\tfn test_$1() {",
+ "\t\t$2",
+ "\t}",
+ "}"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/configs/Code/User/snippets/toml.json b/configs/Code/User/snippets/toml.json
new file mode 100644
index 0000000..be8de87
--- /dev/null
+++ b/configs/Code/User/snippets/toml.json
@@ -0,0 +1,15 @@
+{
+ "Inculde: Serde": {
+ "prefix": "serde",
+ "body": [
+ "serde = { version = \"^1.0\", features = [\"derive\"] }",
+ "serde_json = \"^1.0\""
+ ]
+ },
+ "Include: Tokio": {
+ "prefix": "tokio",
+ "body": [
+ "tokio = { version = \"$1\", features = [\"macros\", \"rt-multi-thread\"] }$2"
+ ]
+ },
+}
\ No newline at end of file
diff --git a/configs/blender/3.x/scripts/addons/y_aligned_camera.py b/configs/blender/3.x/scripts/addons/y_aligned_camera.py
new file mode 100644
index 0000000..cb60c4e
--- /dev/null
+++ b/configs/blender/3.x/scripts/addons/y_aligned_camera.py
@@ -0,0 +1,54 @@
+bl_info = {
+ "name": "Evan's Y-aligned Camera Creator",
+ "author": "Evan Pratten ",
+ "version": (1, 0),
+ "blender": (3, 0, 0),
+ "description": "Adds a camera that is aligned with the Y axis by default",
+ "category": "General",
+}
+
+import bpy
+from bpy.types import Operator
+from bpy_extras.object_utils import AddObjectHelper
+import math
+
+
+class OBJECT_OT_add_object(Operator, AddObjectHelper):
+ """Create a new Camera Object facing +Y"""
+
+ bl_idname = "mesh.add_y_camera"
+ bl_label = "Add Camera Object facing +Y"
+ bl_options = {"REGISTER", "UNDO"}
+
+ def execute(self, _):
+ print("[+Y Camera] Creating new camera and adding to scene at origin")
+
+ # Create a new camera, facing +Y
+ camera_data = bpy.data.cameras.new(name="Camera")
+ camera_object = bpy.data.objects.new("Camera", camera_data)
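+ # Blender cameras look down their local -Z axis by default, so pitching the
+ # object +90 degrees about X points it along the world +Y axis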
+ camera_object.rotation_euler[0] = math.radians(90)
+
+ # Add the camera to the scene
+ bpy.context.scene.collection.objects.link(camera_object)
+
+ return {"FINISHED"}
+
+
+def blender_button_add_y_camera(obj, _):
+ obj.layout.operator(
+ OBJECT_OT_add_object.bl_idname, text="+Y Camera", icon="CAMERA_DATA"
+ )
+
+
+def register():
+ bpy.utils.register_class(OBJECT_OT_add_object)
+ # Append the named draw function (not a fresh lambda) so unregister() can
+ # later remove the exact same callable from the menu
+ bpy.types.VIEW3D_MT_add.append(blender_button_add_y_camera)
+
+
+def unregister():
+ bpy.utils.unregister_class(OBJECT_OT_add_object)
+ bpy.types.VIEW3D_MT_add.remove(blender_button_add_y_camera)
diff --git a/configs/cargo/config.toml b/configs/cargo/config.toml
new file mode 100644
index 0000000..656e08b
--- /dev/null
+++ b/configs/cargo/config.toml
@@ -0,0 +1,2 @@
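+# Use the system `git` binary for network fetches (instead of the built-in git
+# library), so SSH keys and credential helpers behave the same as regular git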
+[net]
+git-fetch-with-cli = true
\ No newline at end of file
diff --git a/configs/git/.gitconfig b/configs/git/.gitconfig
new file mode 100644
index 0000000..2dc0487
--- /dev/null
+++ b/configs/git/.gitconfig
@@ -0,0 +1,52 @@
+[include]
+ # NOTE: These paths are imported if they exist and ignored if they don't
+ # This allows us to have a single config file for all our machines
+ # and control the specifics by symlinking the relevant files per-machine
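+ # For example, with this repo cloned to ~/.config/ewconfig (as in the README),
+ # a fragment can be enabled on a given machine with something like:
+ #   mkdir -p ~/.config/git/config-fragments
+ #   ln -s ~/.config/ewconfig/configs/git/config-fragments/personal-info.gitconfig ~/.config/git/config-fragments/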
+ path = ~/.config/git/config-fragments/global-mailmap.gitconfig
+ path = ~/.config/git/config-fragments/personal-info.gitconfig
+ path = ~/.config/git/config-fragments/enable-signing.gitconfig
+
+[init]
+ defaultBranch = master
+
+[pull]
+ rebase = false
+
+[advice]
+ detachedHead = true
+
+[alias]
+ authors = shortlog --summary --numbered --email
+ tree = log --graph --decorate --abbrev-commit --all \
+ --pretty=format:'%C(yellow)commit %h%C(auto)%d%n%C(cyan)Author:%Creset %aN %C(dim white)<%aE>%n%C(cyan)Date:%Creset %C(dim white)%ad (%ar)%n%s%n' \
+ --date=format:'%b %d %Y %H:%M:%S %z'
+ branches = branch -a -l -vv
+ overview = log --all --pretty=format:'%C(green)commit %C(yellow)%h%C(green) by %C(reset)%C(yellow)%aN %C(dim white)(%ar) %n%C(dim white)%S%n%B%n'
+ lscommits = ! ( echo -e "Commits\tFile" && git log --pretty=format: --name-only | sed '/^$/d' | sort | uniq -c | sort -g -r ) | less
+ lsc = lscommits
+ diff-against = diff --merge-base
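+ # e.g. "git diff-against origin/master" diffs the working tree against the merge base with that branch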
+ fix-recreated-branch = reset --hard @{u}
+ fa = fetch --all
+ pa = pull --all
+ c = commit
+ aa = add .
+
+[filter "lfs"]
+ clean = git-lfs clean -- %f
+ smudge = git-lfs smudge -- %f
+ process = git-lfs filter-process
+ required = true
+
+[url "ssh://git@github.com/"]
+ pushInsteadOf = https://github.com/
+
+[credential "https://github.com"]
+ helper =
+ helper = !/usr/bin/gh auth git-credential
+
+[credential "https://gist.github.com"]
+ helper =
+ helper = !/usr/bin/gh auth git-credential
+
+[push]
+ autoSetupRemote = true
diff --git a/configs/git/.mailmap b/configs/git/.mailmap
new file mode 100644
index 0000000..992bb9a
--- /dev/null
+++ b/configs/git/.mailmap
@@ -0,0 +1,7 @@
+Evan Pratten
+Evan Pratten
+William Meathrel
+Carter Tomlenovich
+James Nickoli
+Sam Lownie
+Sam Lownie <30960735+slownie@users.noreply.github.com>
\ No newline at end of file
diff --git a/configs/git/config-fragments/enable-signing.gitconfig b/configs/git/config-fragments/enable-signing.gitconfig
new file mode 100644
index 0000000..f392a2a
--- /dev/null
+++ b/configs/git/config-fragments/enable-signing.gitconfig
@@ -0,0 +1,6 @@
+[gpg]
+ format = ssh
+
+[gpg "ssh"]
+ allowedSignersFile = ~/.ssh/allowed_signers
+ defaultKeyCommand = ssh-add -L
\ No newline at end of file
diff --git a/configs/git/config-fragments/global-mailmap.gitconfig b/configs/git/config-fragments/global-mailmap.gitconfig
new file mode 100644
index 0000000..4953fee
--- /dev/null
+++ b/configs/git/config-fragments/global-mailmap.gitconfig
@@ -0,0 +1,4 @@
+# Feel free to overwrite this file. It is only copied once.
+
+[mailmap]
+ file = ~/.config/git/.mailmap
diff --git a/configs/git/config-fragments/personal-info.gitconfig b/configs/git/config-fragments/personal-info.gitconfig
new file mode 100644
index 0000000..1b89904
--- /dev/null
+++ b/configs/git/config-fragments/personal-info.gitconfig
@@ -0,0 +1,10 @@
+[user]
+ email = evan@ewpratten.com
+ name = Evan Pratten
+ signingkey = ~/.ssh/id_ed25519_sk_rk_yk20572395
+
+[sendemail]
+ smtpserver = smtp.migadu.com
+ smtpuser = evan@ewpratten.com
+ smtpencryption = tls
+ smtpserverport = 587
diff --git a/configs/gnome/desktop-settings.sh b/configs/gnome/desktop-settings.sh
new file mode 100644
index 0000000..c9b2de1
--- /dev/null
+++ b/configs/gnome/desktop-settings.sh
@@ -0,0 +1,79 @@
+#! /bin/sh
+# This script configures GNOME to my liking
+set -e
+
+# Require gsettings
+if ! command -v gsettings >/dev/null 2>&1; then
+ echo "gsettings is not installed, skipping some GNOME configuration"
+ exit 0
+fi
+
+# Mouse settings
+gsettings set org.gnome.desktop.interface gtk-enable-primary-paste true # Middle click paste
+gsettings set org.gnome.desktop.peripherals.touchpad disable-while-typing false # Allow touchpad while typing
+gsettings set org.gnome.desktop.peripherals.touchpad natural-scroll false # Disable natural scrolling on touchpad
+gsettings set org.gnome.desktop.peripherals.touchpad tap-to-click true # Enable tap-to-click on touchpad
+gsettings set org.gnome.desktop.interface show-battery-percentage true # Show battery percentage
+
+# Disable application switching with Super+num keys
+gsettings set org.gnome.shell.keybindings switch-to-application-1 "[]"
+gsettings set org.gnome.shell.keybindings switch-to-application-2 "[]"
+gsettings set org.gnome.shell.keybindings switch-to-application-3 "[]"
+gsettings set org.gnome.shell.keybindings switch-to-application-4 "[]"
+gsettings set org.gnome.shell.keybindings switch-to-application-5 "[]"
+gsettings set org.gnome.shell.keybindings switch-to-application-6 "[]"
+gsettings set org.gnome.shell.keybindings switch-to-application-7 "[]"
+gsettings set org.gnome.shell.keybindings switch-to-application-8 "[]"
+gsettings set org.gnome.shell.keybindings switch-to-application-9 "[]"
+
+# Keyboard settings
+gsettings set org.gnome.desktop.wm.keybindings close "['<Super><Shift>q']" # Close windows with Mod+Shift+q
+gsettings set org.gnome.desktop.wm.keybindings move-to-workspace-1 "['<Super><Shift>exclam']" # Move a window to ws 1
+gsettings set org.gnome.desktop.wm.keybindings move-to-workspace-2 "['<Super><Shift>at']" # Move a window to ws 2
+gsettings set org.gnome.desktop.wm.keybindings move-to-workspace-3 "['<Super><Shift>numbersign']" # Move a window to ws 3
+gsettings set org.gnome.desktop.wm.keybindings move-to-workspace-4 "['<Super><Shift>dollar']" # Move a window to ws 4
+gsettings set org.gnome.desktop.wm.keybindings switch-to-workspace-1 "['<Super>1']" # Switch to ws 1
+gsettings set org.gnome.desktop.wm.keybindings switch-to-workspace-2 "['<Super>2']" # Switch to ws 2
+gsettings set org.gnome.desktop.wm.keybindings switch-to-workspace-3 "['<Super>3']" # Switch to ws 3
+gsettings set org.gnome.desktop.wm.keybindings switch-to-workspace-4 "['<Super>4']" # Switch to ws 4
+
+# Match the Windows screenshot behavior while also allowing the regular print screen key to work
+gsettings set org.gnome.shell.keybindings show-screenshot-ui "['<Super><Shift>s', 'Print']"
+
+# Generate custom keybinds if they do not yet exist
+keybindings=$(gsettings get org.gnome.settings-daemon.plugins.media-keys custom-keybindings)
+if [ "$keybindings" = "[]" ] || [ "$keybindings" = "@as []" ]; then
+ # Define the list of custom keybindings
+ gsettings set org.gnome.settings-daemon.plugins.media-keys custom-keybindings "['/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom0/', '/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom1/' , '/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom2/']"
+
+ # Allow Mod+Enter to open a terminal
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom0/ name "Terminal"
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom0/ command "gnome-terminal"
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom0/ binding "<Super>Return"
+
+ # Allow Mod+d to launch rofi
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom1/ name "Rofi"
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom1/ command "rofi -show drun"
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom1/ binding "<Super>d"
+
+ # Allow Mod+Shift+Enter to open python
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom2/ name "Python REPL"
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom2/ command "gnome-terminal -- $EWCONFIG_ROOT/.config/ewconfig/scripts/tinker"
+ gsettings set org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom2/ binding "<Super><Shift>Return"
+fi
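+# NOTE: Since the block above only runs when the keybinding list is empty,
+# changes to these custom binds require clearing custom-keybindings (or editing
+# them via dconf) before re-running this script.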
+
+# Top Bar settings
+gsettings set org.gnome.desktop.interface clock-format 24h # 24 hour clock
+gsettings set org.gnome.desktop.interface clock-show-date true # Show date in top bar
+gsettings set org.gnome.desktop.interface clock-show-weekday true # Show weekday in top bar
+
+# Window settings
+gsettings set org.gnome.desktop.wm.preferences focus-mode 'sloppy' # Focus windows on mouse hover
+gsettings set org.gnome.desktop.wm.preferences auto-raise false # Don't auto-raise windows
+
+# Desktop settings
+gsettings set org.gnome.desktop.interface enable-hot-corners false # Disable hot corners
+gsettings set org.gnome.mutter edge-tiling true # Enable edge tiling
+gsettings set org.gnome.mutter dynamic-workspaces false # Use a fixed number of workspaces
+gsettings set org.gnome.desktop.wm.preferences num-workspaces 4 # Use 4 workspaces
+gsettings set org.gnome.mutter workspaces-only-on-primary true # Only use workspaces on primary monitor
diff --git a/configs/gnome/gnome-terminal-settings.sh b/configs/gnome/gnome-terminal-settings.sh
new file mode 100644
index 0000000..51b0601
--- /dev/null
+++ b/configs/gnome/gnome-terminal-settings.sh
@@ -0,0 +1,15 @@
+#! /bin/sh
+set -e
+
+# Get the path to this script
+SCRIPT_PATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+
+# Check if dconf is available
+if ! command -v dconf >/dev/null 2>&1; then
+ echo "dconf is not installed, skipping GNOME configuration"
+ exit 0
+fi
+
+# Configure gnome-terminal
+echo "Writing gnome-terminal settings..."
+dconf load "/org/gnome/terminal/" < "$SCRIPT_PATH/terminal/terminal.dconf"
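+
+# To refresh the dump from an already-configured machine, something like this works:
+#   dconf dump /org/gnome/terminal/ > "$SCRIPT_PATH/terminal/terminal.dconf"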
diff --git a/configs/gnome/terminal/terminal.dconf b/configs/gnome/terminal/terminal.dconf
new file mode 100644
index 0000000..c563b92
--- /dev/null
+++ b/configs/gnome/terminal/terminal.dconf
@@ -0,0 +1,7 @@
+[legacy/profiles:/:b1dcc9dd-5262-4d8d-a863-c897e6d979b9]
+custom-command='/usr/bin/zsh'
+default-size-columns=100
+default-size-rows=40
+login-shell=true
+use-custom-command=true
+visible-name='Default'
diff --git a/configs/helix/config.toml b/configs/helix/config.toml
new file mode 100644
index 0000000..0f3ee7c
--- /dev/null
+++ b/configs/helix/config.toml
@@ -0,0 +1,13 @@
+[editor]
+mouse = true
+middle-click-paste = true
+line-number = "absolute"
+auto-completion = true
+auto-format = false
+
+[editor.cursor-shape]
+normal = "block"
+insert = "bar"
+select = "block"
+
+
diff --git a/configs/houdini19.5/scripts/456.cmd b/configs/houdini19.5/scripts/456.cmd
new file mode 100644
index 0000000..4101639
--- /dev/null
+++ b/configs/houdini19.5/scripts/456.cmd
@@ -0,0 +1 @@
+preference general.desk.val "Solaris"
\ No newline at end of file
diff --git a/configs/houdini19.5/scripts/post_frame_usdnc_to_usd.py b/configs/houdini19.5/scripts/post_frame_usdnc_to_usd.py
new file mode 100644
index 0000000..22ed009
--- /dev/null
+++ b/configs/houdini19.5/scripts/post_frame_usdnc_to_usd.py
@@ -0,0 +1,17 @@
+from pxr import Usd
+from pathlib import Path
+
+# Figure out what file just got exported
+# NOTE: The filename the user enters is not the actual filename due to the NC suffix
+output_file_field = Path(hou.pwd().parm("lopoutput").eval())
+rendered_file = output_file_field.with_suffix(".usdnc")
+print(f"[USDNC To USD]: Converting {rendered_file} to USD")
+
+# Load the rendered stage
+print("[USDNC To USD]: Loading stage")
+stage = Usd.Stage.Open(str(rendered_file))
+
+# Write it again with the appropriate extension
+output_file = rendered_file.with_suffix(output_file_field.suffix)
+print(f"[USDNC To USD]: Exporting to: {output_file}")
+stage.Export(str(output_file))
diff --git a/configs/logid/logid.cfg b/configs/logid/logid.cfg
new file mode 100644
index 0000000..1e50b69
--- /dev/null
+++ b/configs/logid/logid.cfg
@@ -0,0 +1,54 @@
+devices: (
+ {
+ name: "M720 Triathlon Multi-Device Mouse",
+ hiresscroll: {
+ hires: true,
+ invert: false,
+ target: false
+ },
+ dpi: 1000,
+
+ buttons: (
+ # Back
+ {
+ cid: 0x53,
+ action: {
+ type: "None"
+ }
+ },
+
+ # Forward
+ {
+ cid: 0x56,
+ action: {
+ type: "None"
+ }
+ },
+
+ # Left Scroll
+ {
+ cid: 0x5b,
+ action: {
+ type: "Keypress",
+ keys: [ "BTN_MIDDLE" ]
+ }
+ },
+
+ # Right Scroll
+ {
+ cid: 0x5d,
+ action: {
+ type: "None"
+ }
+ },
+
+ # Squeeze
+ {
+ cid: 0xd0,
+ action: {
+ type: "None"
+ }
+ },
+ )
+ }
+);
\ No newline at end of file
diff --git a/configs/memegen/fonts/impact.ttf b/configs/memegen/fonts/impact.ttf
new file mode 100644
index 0000000..114e6c1
Binary files /dev/null and b/configs/memegen/fonts/impact.ttf differ
diff --git a/configs/memegen/templates/bernie-asking/config.json b/configs/memegen/templates/bernie-asking/config.json
new file mode 100644
index 0000000..5a2f0f4
--- /dev/null
+++ b/configs/memegen/templates/bernie-asking/config.json
@@ -0,0 +1,17 @@
+{
+ "font": "impact.ttf",
+ "fill_color": [255, 255, 255],
+ "stroke_color": [0, 0, 0],
+ "stroke_width": 2,
+ "zones": {
+ "bottom": {
+ "horizontal_align": "center",
+ "horizontal_offset": 0,
+ "vertical_align": "bottom",
+ "vertical_offset": -50,
+ "width": "80%",
+ "max_line_height": 50,
+ "line_spacing": 5
+ }
+ }
+}
\ No newline at end of file
diff --git a/configs/memegen/templates/bernie-asking/template.png b/configs/memegen/templates/bernie-asking/template.png
new file mode 100644
index 0000000..0d19bb4
Binary files /dev/null and b/configs/memegen/templates/bernie-asking/template.png differ
diff --git a/configs/memegen/templates/hotline-bling/config.json b/configs/memegen/templates/hotline-bling/config.json
new file mode 100644
index 0000000..cded53e
--- /dev/null
+++ b/configs/memegen/templates/hotline-bling/config.json
@@ -0,0 +1,34 @@
+{
+ "font": "impact.ttf",
+ "fill_color": [
+ 0,
+ 0,
+ 0
+ ],
+ "stroke_color": [
+ 0,
+ 0,
+ 0
+ ],
+ "stroke_width": 0,
+ "zones": {
+ "top": {
+ "horizontal_align": "right",
+ "horizontal_offset": -5,
+ "vertical_align": "top",
+ "vertical_offset": 75,
+ "width": "45%",
+ "max_line_height": 50,
+ "line_spacing": 5
+ },
+ "bottom": {
+ "horizontal_align": "right",
+ "horizontal_offset": -5,
+ "vertical_align": "bottom",
+ "vertical_offset": -75,
+ "width": "45%",
+ "max_line_height": 50,
+ "line_spacing": 5
+ }
+ }
+}
\ No newline at end of file
diff --git a/configs/memegen/templates/hotline-bling/template.png b/configs/memegen/templates/hotline-bling/template.png
new file mode 100644
index 0000000..7002060
Binary files /dev/null and b/configs/memegen/templates/hotline-bling/template.png differ
diff --git a/configs/memegen/templates/megamind/config.json b/configs/memegen/templates/megamind/config.json
new file mode 100644
index 0000000..7ca4dbc
--- /dev/null
+++ b/configs/memegen/templates/megamind/config.json
@@ -0,0 +1,17 @@
+{
+ "font": "impact.ttf",
+ "fill_color": [255, 255, 255],
+ "stroke_color": [0, 0, 0],
+ "stroke_width": 2,
+ "zones": {
+ "top": {
+ "horizontal_align": "center",
+ "horizontal_offset": 0,
+ "vertical_align": "top",
+ "vertical_offset": 5,
+ "width": "80%",
+ "max_line_height": 80,
+ "line_spacing": 5
+ }
+ }
+}
\ No newline at end of file
diff --git a/configs/memegen/templates/megamind/template.png b/configs/memegen/templates/megamind/template.png
new file mode 100644
index 0000000..52bf5e5
Binary files /dev/null and b/configs/memegen/templates/megamind/template.png differ
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_[§§]/config.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_[§§]/config.txt
new file mode 100644
index 0000000..390744a
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_[§§]/config.txt
@@ -0,0 +1,14 @@
+//waypoints config options
+usingMultiworldDetection:false
+ignoreServerLevelId:false
+defaultMultiworldId:mw-4,0,2
+teleportationEnabled:true
+usingDefaultTeleportCommand:true
+sortType:NONE
+sortReversed:false
+
+//other config options
+ignoreHeightmaps:false
+
+//dimension types (DO NOT EDIT)
+dimensionType:minecraft$overworld:minecraft$overworld
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_[§§]/dim%0/mw$default_1.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_[§§]/dim%0/mw$default_1.txt
new file mode 100644
index 0000000..fa0d9c8
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_[§§]/dim%0/mw$default_1.txt
@@ -0,0 +1,3 @@
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:visibility_type:destination
+#
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.ewpratten.com/config.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.ewpratten.com/config.txt
new file mode 100644
index 0000000..64c2a5c
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.ewpratten.com/config.txt
@@ -0,0 +1,14 @@
+//waypoints config options
+usingMultiworldDetection:false
+ignoreServerLevelId:false
+defaultMultiworldId:mw-13,1,0
+teleportationEnabled:true
+usingDefaultTeleportCommand:true
+sortType:NONE
+sortReversed:false
+
+//other config options
+ignoreHeightmaps:false
+
+//dimension types (DO NOT EDIT)
+dimensionType:minecraft$overworld:minecraft$overworld
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.ewpratten.com/dim%0/mw$default_1.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.ewpratten.com/dim%0/mw$default_1.txt
new file mode 100644
index 0000000..b892127
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.ewpratten.com/dim%0/mw$default_1.txt
@@ -0,0 +1,7 @@
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:visibility_type:destination
+#
+waypoint:horse parking:H:970:68:2059:6:false:0:gui.xaero_default:false:0:0:false
+waypoint:Home:H:-1647:98:-339:2:false:0:gui.xaero_default:false:0:0:false
+waypoint:Village:V:-1575:64:-409:5:false:0:gui.xaero_default:false:0:0:false
+waypoint:Spawn:S:-762:77:77:0:false:0:gui.xaero_default:false:0:0:false
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/config.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/config.txt
new file mode 100644
index 0000000..7b27802
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/config.txt
@@ -0,0 +1,7 @@
+usingMultiworldDetection:true
+ignoreServerLevelId:false
+teleportationEnabled:true
+usingDefaultTeleportCommand:true
+sortType:NONE
+sortReversed:false
+
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_-1/mw$default_2.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_-1/mw$default_2.txt
new file mode 100644
index 0000000..a592213
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_-1/mw$default_2.txt
@@ -0,0 +1,17 @@
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:global
+#
+waypoint:Anarchy:A:79:128:328:13:false:0:gui.xaero_default:false:0:false
+waypoint:Other roof hole:O:147:128:296:1:false:0:gui.xaero_default:false:0:false
+waypoint:Carter:C:-125:128:-164:2:false:0:gui.xaero_default:false:0:false
+waypoint:Percy:P:-42:132:223:10:false:0:gui.xaero_default:false:0:false
+waypoint:cat:C:-51:128:536:5:false:0:gui.xaero_default:false:0:false
+waypoint:Mountain Base:M:-172:131:6:2:false:0:gui.xaero_default:false:0:false
+waypoint:james:J:-63:131:-64:2:false:0:gui.xaero_default:false:0:false
+waypoint:Ladder to Top:L:-92:89:-30:2:false:0:gui.xaero_default:false:0:false
+waypoint:te:T:6:38:251:4:false:0:gui.xaero_default:false:0:false
+waypoint:ice portal:I:-650:128:-250:3:false:0:gui.xaero_default:false:0:false
+waypoint:End:E:-213:131:-78:12:false:0:gui.xaero_default:false:0:false
+waypoint:Home:H:-24:132:128:0:false:0:gui.xaero_default:false:0:false
+waypoint:Spawn Portal:S:-74:83:2:0:false:0:gui.xaero_default:false:0:false
+
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_-1/mw-1,0,0_3.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_-1/mw-1,0,0_3.txt
new file mode 100644
index 0000000..894f424
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_-1/mw-1,0,0_3.txt
@@ -0,0 +1,6 @@
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:global
+#
+waypoint:jake:J:-32:131:-110:9:false:0:gui.xaero_default:false:0:false
+waypoint:Home:H:-10:132:129:5:false:0:gui.xaero_default:false:0:false
+
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw$default_1.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw$default_1.txt
new file mode 100644
index 0000000..cd8155b
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw$default_1.txt
@@ -0,0 +1,39 @@
+sets:gui.xaero_default:RemotePlayers_Temp:Bases
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:global
+#
+waypoint:Anarchy:A:687:32:2547:13:false:0:gui.xaero_default:false:0:false
+waypoint:Nice spot:N:257:81:319:14:false:0:gui.xaero_default:false:0:false
+waypoint:Carter:C:-994:65:-1305:10:false:0:gui.xaero_default:false:0:false
+waypoint:Ice portal:I:-5201:86:-1988:2:false:0:gui.xaero_default:false:0:false
+waypoint:Percy:P:-327:71:1801:10:false:0:gui.xaero_default:false:0:false
+waypoint:Cat's Secret Base:C:-450:64:4368:12:false:0:gui.xaero_default:false:0:false
+waypoint:Squid Farm:S:1140:64:-1625:0:false:0:gui.xaero_default:false:0:false
+waypoint:Chicken Farm:C:-267:55:150:0:false:0:gui.xaero_default:false:0:false
+waypoint:t:T:-2391:80:1305:0:false:0:gui.xaero_default:false:0:false
+waypoint:Sally:S:-1439:64:-59:10:false:0:gui.xaero_default:false:0:false
+waypoint:Skeleton Farm:S:1718:70:384:0:false:0:gui.xaero_default:false:0:false
+waypoint:CommonOctopus:C:1662:68:306:10:false:0:gui.xaero_default:false:0:false
+waypoint:Totem farm:T:-38:71:1656:0:false:0:gui.xaero_default:false:0:false
+waypoint:Wills monument:W:-1909:56:798:3:false:0:gui.xaero_default:false:0:false
+waypoint:Monument:M:2799:52:1376:3:false:0:gui.xaero_default:false:0:false
+waypoint:Will:W:-483:62:338:10:false:0:gui.xaero_default:false:0:false
+waypoint:Tyson:T:3298:178:-2820:10:false:0:gui.xaero_default:false:0:false
+waypoint:Sydney:S:-1748:110:320:10:false:0:gui.xaero_default:false:0:false
+waypoint:Ethan:E:-1447:67:-830:10:false:0:gui.xaero_default:false:0:false
+waypoint:Ian:I:162:105:-125:10:false:0:gui.xaero_default:false:0:false
+waypoint:Cat:C:-278:70:-290:10:false:0:gui.xaero_default:false:0:false
+waypoint:Turtles:T:-1761:65:40:15:false:0:gui.xaero_default:false:0:false
+waypoint:Trident Farm:T:97:200:-2280:0:false:0:gui.xaero_default:false:0:false
+waypoint:Test Site:T:-841:65:1421:15:false:0:gui.xaero_default:false:0:false
+waypoint:Zombie Farm:Z:-475:26:1021:0:false:0:gui.xaero_default:false:0:false
+waypoint:Mountain Base:M:-1304:79:-7:10:false:0:gui.xaero_default:false:0:false
+waypoint:Jake:J:-435:69:-851:10:false:0:gui.xaero_default:false:0:false
+waypoint:James:J:-501:66:-514:10:false:0:gui.xaero_default:false:0:false
+waypoint:Nether Fortress:G:307:83:1452:6:false:0:gui.xaero_default:false:0:false
+waypoint:End Portal:E:-1712:31:-622:6:false:0:gui.xaero_default:false:0:false
+waypoint:Large Village:V:331:63:1396:4:false:0:gui.xaero_default:false:0:false
+waypoint:Evan:H:-170:63:1038:10:false:0:gui.xaero_default:false:0:false
+waypoint:Community Centre:C:-499:67:79:12:false:0:gui.xaero_default:false:0:false
+waypoint:test:T:-144:68:1024:5:false:0:Bases:false:0:false
+
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw$old_3.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw$old_3.txt
new file mode 100644
index 0000000..3513847
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw$old_3.txt
@@ -0,0 +1,6 @@
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:global
+#
+waypoint:gui.xaero_deathpoint:D:-471:65:76:0:false:1:gui.xaero_default:false:0:true
+waypoint:gui.xaero_deathpoint_old:D:-200:70:0:0:false:0:gui.xaero_default:false:0:true
+
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw-4,0,2_2.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw-4,0,2_2.txt
new file mode 100644
index 0000000..ec14331
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_0/mw-4,0,2_2.txt
@@ -0,0 +1,4 @@
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:global
+#
+
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_1/mw$default_1.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_1/mw$default_1.txt
new file mode 100644
index 0000000..5bd4f09
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.rsninja.dev/dim_1/mw$default_1.txt
@@ -0,0 +1,10 @@
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:global
+#
+waypoint:e:E:392:49:-2200:2:false:0:gui.xaero_default:false:0:false
+waypoint:XP Farm:X:375:1:-8:0:false:0:gui.xaero_default:false:0:false
+waypoint:Portal:P:0:68:0:5:false:0:gui.xaero_default:false:0:false
+waypoint:another portal:A:289:60:1365:2:false:0:gui.xaero_default:false:0:false
+waypoint:island:I:-576:62:1050:3:false:0:gui.xaero_default:false:0:false
+waypoint:island:I:-576:62:1050:12:false:0:gui.xaero_default:false:0:false
+
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.sdf.org/config.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.sdf.org/config.txt
new file mode 100644
index 0000000..9ee7aa3
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.sdf.org/config.txt
@@ -0,0 +1,17 @@
+//waypoints config options
+usingMultiworldDetection:false
+ignoreServerLevelId:false
+defaultMultiworldId:mw-3,1,-4
+teleportationEnabled:true
+usingDefaultTeleportCommand:false
+serverTeleportCommandFormat:^col^warp add {x},{y},{z}
+serverTeleportCommandRotationFormat:^col^warp add {x},{y},{z}
+sortType:NONE
+sortReversed:false
+
+//other config options
+ignoreHeightmaps:false
+
+//dimension types (DO NOT EDIT)
+dimensionType:minecraft$overworld:minecraft$overworld
+dimensionType:minecraft$the_end:minecraft$the_end
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.sdf.org/dim%0/mw$default_1.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.sdf.org/dim%0/mw$default_1.txt
new file mode 100644
index 0000000..03f7994
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.sdf.org/dim%0/mw$default_1.txt
@@ -0,0 +1,115 @@
+sets:gui.xaero_default:Buildings:Subway Stations
+#
+#waypoint:name:initials:x:y:z:color:disabled:type:set:rotate_on_tp:tp_yaw:visibility_type:destination
+#
+waypoint:Iron Farm:I:-186:57:-217:8:false:0:Buildings:false:0:0:false
+waypoint:Spawn Inn:S:-136:70:-233:8:false:0:Buildings:false:0:0:false
+waypoint:Kenny Rogers Roaster:K:-156:70:-233:8:false:0:Buildings:false:0:0:false
+waypoint:ITC:I:-159:70:-223:8:false:0:Buildings:false:0:0:false
+waypoint:Dojo St Cafe:D:-239:67:-193:8:false:0:Buildings:false:0:0:false
+waypoint:Jacob's Residence:J:-221:69:-220:8:false:0:Buildings:false:0:0:false
+waypoint:Dojo St Subway Station:D:-235:69:-232:8:false:0:Buildings:false:0:0:false
+waypoint:Schwa Corp HQ:S:-238:71:-254:8:false:0:Buildings:false:0:0:false
+waypoint:Northern & Bee Station:S:-252:56:-433:8:false:0:Subway Stations:false:0:0:false
+waypoint:Monument Place Station:S:-220:95:-186:12:false:0:Subway Stations:false:0:0:false
+waypoint:[ZOG] Zombie Grinder Station:S:-236:3:-181:14:false:0:Subway Stations:false:0:0:false
+waypoint:[PMI] Prismarine Inn Station:S:-204:63:578:5:false:0:Subway Stations:false:0:0:false
+waypoint:[DSW] Dismal Swamp Station:S:-322:63:364:5:false:0:Subway Stations:false:0:0:false
+waypoint:[APY] Apiary Station:S:-322:63:41:5:false:0:Subway Stations:false:0:0:false
+waypoint:[APY] Apiary Station:S:-316:60:43:15:false:0:Subway Stations:false:0:0:false
+waypoint:Southwest Blvd:S:-270:64:5:15:false:0:Subway Stations:false:0:0:false
+waypoint:Southlands:S:-263:64:-45:15:false:0:Subway Stations:false:0:0:false
+waypoint:Three Sisters:S:-263:64:-85:15:false:0:Subway Stations:false:0:0:false
+waypoint:Small Hall Station:S:-268:65:-127:15:false:0:Subway Stations:false:0:0:false
+waypoint:Monument Place Station:S:-256:65:-151:15:false:0:Subway Stations:false:0:0:false
+waypoint:Zombie Grinder Station:S:-256:65:-180:15:false:0:Subway Stations:false:0:0:false
+waypoint:DOJO St Station:S:-242:69:-227:15:false:0:Subway Stations:false:0:0:false
+waypoint:New Cornick House Station:S:-202:66:-229:15:false:0:Subway Stations:false:0:0:false
+waypoint:Wintergarden Station:S:-184:62:-243:15:false:0:Subway Stations:false:0:0:false
+waypoint:[PRU] Pine Ruins Station:S:225:38:-293:15:false:0:Subway Stations:false:0:0:false
+waypoint:[CSD] Canalside Station:S:143:38:-291:15:false:0:Subway Stations:false:0:0:false
+waypoint:Spawn Central Station:S:-213:42:-264:15:false:0:Subway Stations:false:0:0:false
+waypoint:[SWL] Southwest Landing Station:S:-510:63:137:13:false:0:Subway Stations:false:0:0:false
+waypoint:[APY] Apiary Station:S:-326:63:41:13:false:0:Subway Stations:false:0:0:false
+waypoint:[ZOG] Zombie Grinder Station:S:-229:3:-181:13:false:0:Subway Stations:false:0:0:false
+waypoint:[WCP] Whitecaps Station:S:-27:83:63:13:false:0:Subway Stations:false:0:0:false
+waypoint:[MTV] Mountain Village Station:S:-27:38:7:13:false:0:Subway Stations:false:0:0:false
+waypoint:[ESJ] Eastside Transfer:S:-31:38:-237:13:false:0:Subway Stations:false:0:0:false
+waypoint:[SOU] Southlands Terminal:S:-245:42:-49:14:false:0:Subway Stations:false:0:0:false
+waypoint:Monument Place Station:S:-218:43:-137:14:false:0:Subway Stations:false:0:0:false
+waypoint:Spawn Central Station:S:-213:42:-252:14:false:0:Subway Stations:false:0:0:false
+waypoint:Mountain Station:S:-899:111:-607:12:false:0:Subway Stations:false:0:0:false
+waypoint:Mensa Club Station:S:-900:111:-4187:12:false:0:Subway Stations:false:0:0:false
+waypoint:Un-Named Interchange:I:-900:109:-2320:12:false:0:Subway Stations:false:0:0:false
+waypoint:Farmington Station:S:-1630:107:-2316:12:false:0:Subway Stations:false:0:0:false
+waypoint:Village Layover Station:S:-2143:97:-2315:12:false:0:Subway Stations:false:0:0:false
+waypoint:Twin Peaks Station:S:-2135:97:-1015:12:false:0:Subway Stations:false:0:0:false
+waypoint:Witchy Swamp Station:S:-2143:97:-1523:12:false:0:Subway Stations:false:0:0:false
+waypoint:Ocean Overlook Station:S:-2726:106:-186:12:false:0:Subway Stations:false:0:0:false
+waypoint:Un-Named Interchange:I:-2111:106:-186:8:false:0:Subway Stations:false:0:0:false
+waypoint:End Portal Station:S:927:97:1223:12:false:0:Subway Stations:false:0:1:false
+waypoint:Craniumslows Station:S:-1048:97:-94:12:false:0:Subway Stations:false:0:0:false
+waypoint:Un-Named Interchange:I:-1048:97:-186:12:false:0:Subway Stations:false:0:0:false
+waypoint:Un-Named Interchange:I:-1375:97:-188:12:false:0:Subway Stations:false:0:0:false
+waypoint:Sheep Station:S:-1372:97:507:12:false:0:Subway Stations:false:0:0:false
+waypoint:Cow Station:S:-1372:97:659:12:false:0:Subway Stations:false:0:0:false
+waypoint:Un-Named Interchange:I:-1033:98:1087:12:false:0:Subway Stations:false:0:0:false
+waypoint:South Station:S:-1372:98:1088:12:false:0:Subway Stations:false:0:0:false
+waypoint:1567 Station:S:-1568:97:909:12:false:0:Subway Stations:false:0:0:false
+waypoint:Magenta Station:S:-1372:97:908:12:false:0:Subway Stations:false:0:0:false
+waypoint:Un-Named Interchange:I:-980:97:907:12:false:0:Subway Stations:false:0:0:false
+waypoint:Un-Named Interchange:I:-891:95:-187:12:false:0:Subway Stations:false:0:0:false
+waypoint:Dark Oak Station:S:-700:97:-185:12:false:0:Subway Stations:false:0:0:false
+waypoint:Cat Ave Station:S:-536:96:-187:12:false:0:Subway Stations:false:0:0:false
+waypoint:End of Line:S:-220:97:1176:12:false:0:Subway Stations:false:0:0:false
+waypoint:Unknown Station:S:-220:97:6:12:false:0:Subway Stations:false:0:0:false
+waypoint:Spawn Glider Port Station:S:-219:97:-254:12:false:0:Subway Stations:false:0:0:false
+waypoint:Bell Bridge / Changa Station:S:-219:97:-375:12:false:0:Subway Stations:false:0:0:false
+waypoint:Eccentric Genius Station:S:-219:96:-481:12:false:0:Subway Stations:false:0:0:false
+waypoint:Xiled Station:S:-219:97:-552:12:false:0:Subway Stations:false:0:0:false
+waypoint:Nopantsistan Station:S:-219:98:-650:12:false:0:Subway Stations:false:0:0:false
+waypoint:[HLV] Highland Village Station:S:-220:97:-1797:12:false:0:Subway Stations:false:0:0:false
+waypoint:[MSW] Mid-Swamp Station:S:-187:41:-1254:14:false:0:Subway Stations:false:0:0:false
+waypoint:[JOT] Jotaku Station:S:-187:42:-782:14:false:0:Subway Stations:false:0:0:false
+waypoint:[NSX] Northside Transfer Station:S:-197:42:-719:5:false:0:Subway Stations:false:0:0:false
+waypoint:Spawn Central Station:S:-217:56:-275:6:false:0:Subway Stations:false:0:0:false
+waypoint:[NRV] North River Station:S:-245:38:-366:5:false:0:Subway Stations:false:0:0:false
+waypoint:[CMK] Central Market Station:S:-245:38:-258:5:false:0:Subway Stations:false:0:0:false
+waypoint:Monument Place Station:S:-219:35:-137:13:false:0:Subway Stations:false:0:0:false
+waypoint:Monument Place Station:S:-220:43:-137:10:false:0:Subway Stations:false:0:0:false
+waypoint:Survey Hall Station:S:-120:59:-481:10:false:0:Subway Stations:false:0:0:false
+waypoint:Inventory Station:S:-143:43:-304:10:false:0:Subway Stations:false:0:0:false
+waypoint:Spawn Square Station:S:-115:53:-256:10:false:0:Subway Stations:false:0:0:false
+waypoint:Tek Square Station:S:63:65:-215:9:false:0:Subway Stations:false:0:0:false
+waypoint:Manor Ave Station:S:-36:58:-225:9:false:0:Subway Stations:false:0:0:false
+waypoint:Spawn Square Station:S:-141:58:-225:9:false:0:Subway Stations:false:0:0:false
+waypoint:DOJO St Station:S:-221:56:-226:9:false:0:Subway Stations:false:0:0:false
+waypoint:Mob St Station:S:-389:64:-222:9:false:0:Subway Stations:false:0:0:false
+waypoint:[CAT] Cat Ave Station:S:-491:64:-222:9:false:0:Subway Stations:false:0:0:false
+waypoint:Garfield Station:S:-624:73:310:9:false:0:Subway Stations:false:0:0:false
+waypoint:Castle Square Station:S:-616:61:-222:9:false:0:Subway Stations:false:0:0:false
+waypoint:[CHA] Changa Station:S:-187:42:-373:14:false:0:Subway Stations:false:0:0:false
+waypoint:[ECG] Eccentric Genius:S:-187:42:-477:14:false:0:Subway Stations:false:0:0:false
+waypoint:[XIL] Xiled Station:S:-187:42:-575:14:false:0:Subway Stations:false:0:0:false
+waypoint:[NOP] Nopantsistan Station:S:-187:42:-662:14:false:0:Subway Stations:false:0:0:false
+waypoint:[NSX] Northside Transfer:S:-187:42:-720:14:false:0:Subway Stations:false:0:0:false
+waypoint:[NPN] Nopantsistan Station:S:-245:38:-665:5:false:0:Subway Stations:false:0:0:false
+waypoint:[LAY] Laydros Station:S:-245:38:-543:5:false:0:Subway Stations:false:0:0:false
+waypoint:Red Station:S:-900:109:-1992:8:false:0:Subway Stations:false:0:0:false
+waypoint:[NSC] North Shore City Terminal:S:-72:42:-1946:14:false:0:Subway Stations:false:0:0:false
+waypoint:[NSJ] North Shore Junction:J:-183:39:-1949:8:false:0:Subway Stations:false:0:0:false
+waypoint:[HLV] Highland Village Station:S:-187:41:-1736:14:false:0:Subway Stations:false:0:0:false
+waypoint:[NDK] North Docks Station:S:-187:41:-1519:14:false:0:Subway Stations:false:0:0:false
+waypoint:[BOH] Boathouse Station:S:-187:42:-839:14:false:0:Subway Stations:false:0:0:false
+waypoint:[RST] Riverside Station:S:-189:42:-321:14:false:0:Subway Stations:false:0:0:false
+waypoint:Spawn Central Station:S:-213:42:-258:13:false:0:Subway Stations:false:0:0:false
+waypoint:Slime Farm:S:4975:4:5854:1:false:0:gui.xaero_default:false:0:0:false
+waypoint:Northern Village:N:37:74:-10056:8:false:0:gui.xaero_default:false:0:0:false
+waypoint:Creeper Farm:C:5092:5:5481:9:false:0:gui.xaero_default:false:0:0:false
+waypoint:Warp Room:W:-4328:7:-2168:14:false:0:gui.xaero_default:false:0:0:false
+waypoint:New Base:N:-532:73:-2308:4:false:0:gui.xaero_default:false:0:1:false
+waypoint:Percy Home:X:-131:66:-102:9:false:0:gui.xaero_default:false:0:1:false
+waypoint:End Portal:E:921:23:1221:5:false:0:gui.xaero_default:false:0:0:false
+waypoint:Miwu Home:M:-287:77:-432:9:false:0:gui.xaero_default:false:0:1:false
+waypoint:Home:H:-198:69:-303:9:false:0:gui.xaero_default:false:0:1:false
+waypoint:Spawn:*:-137:70:-256:0:false:0:gui.xaero_default:false:0:1:false
diff --git a/configs/minecraft/XaeroWaypoints/Multiplayer_mc.yarc.world/config.txt b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.yarc.world/config.txt
new file mode 100644
index 0000000..33028ea
--- /dev/null
+++ b/configs/minecraft/XaeroWaypoints/Multiplayer_mc.yarc.world/config.txt
@@ -0,0 +1,14 @@
+//waypoints config options
+usingMultiworldDetection:false
+ignoreServerLevelId:false
+defaultMultiworldId:mw0,1,0
+teleportationEnabled:true
+usingDefaultTeleportCommand:true
+sortType:NONE
+sortReversed:false
+
+//other config options
+ignoreHeightmaps:false
+
+//dimension types (DO NOT EDIT)
+dimensionType:minecraft$overworld:minecraft$overworld
diff --git a/configs/minecraft/scripts/mc_postexit.py b/configs/minecraft/scripts/mc_postexit.py
new file mode 100644
index 0000000..1b5eb08
--- /dev/null
+++ b/configs/minecraft/scripts/mc_postexit.py
@@ -0,0 +1,50 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import os
+import shutil
+from pathlib import Path
+
+MINECRAFT_DIR = Path(os.environ["INST_MC_DIR"])
+WAYPOINT_BASE_DIR = MINECRAFT_DIR / "XaeroWaypoints"
+GLOBAL_WAYPOINT_DIR = (
+ Path(os.path.expanduser("~")) / ".config" / "minecraft" / "XaeroWaypoints"
+)
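+
+# NOTE: INST_MC_DIR is assumed to be provided by the launcher; MultiMC/Prism-style
+# launchers export it (the instance's .minecraft path) when running custom
+# pre-launch/post-exit commands, which is how this script is intended to be invoked.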
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(description="Post-exit tasks for Minecraft")
+ args = ap.parse_args()
+ print("[EWCONFIG] Executing post-exit tasks for Minecraft")
+ print(f"[EWCONFIG] Minecraft directory: {MINECRAFT_DIR}")
+
+ # If the waypoint base dir doesn't exist, we don't need to do anything
+ if not WAYPOINT_BASE_DIR.exists():
+ print("[EWCONFIG] No waypoints to sync")
+ return 0
+
+ # Find all multiplayer waypoint dirs
+ multiplayer_waypoints = [
+ directory
+ for directory in WAYPOINT_BASE_DIR.iterdir()
+ if directory.is_dir() and directory.name.startswith("Multiplayer")
+ ]
+ print(
+ f"[EWCONFIG] Found {len(multiplayer_waypoints)} multiplayer waypoint directories"
+ )
+
+ # Copy the contents of each multiplayer waypoint dir to the global storage
+ for waypoint_dir in multiplayer_waypoints:
+ dest_dir = GLOBAL_WAYPOINT_DIR / waypoint_dir.name
+ print(f"[EWCONFIG] Copying {waypoint_dir} to {dest_dir}")
+
+ # Use shutil to copy the directory
+ dest_dir.mkdir(parents=True, exist_ok=True)
+ shutil.copytree(waypoint_dir, dest_dir, dirs_exist_ok=True)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/configs/minecraft/scripts/mc_prelaunch.py b/configs/minecraft/scripts/mc_prelaunch.py
new file mode 100644
index 0000000..20ed4d2
--- /dev/null
+++ b/configs/minecraft/scripts/mc_prelaunch.py
@@ -0,0 +1,31 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import os
+import shutil
+from pathlib import Path
+
+MINECRAFT_DIR = Path(os.environ["INST_MC_DIR"])
+WAYPOINT_BASE_DIR = MINECRAFT_DIR / "XaeroWaypoints"
+GLOBAL_WAYPOINT_DIR = (
+ Path(os.path.expanduser("~")) / ".config" / "minecraft" / "XaeroWaypoints"
+)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(description="Pre-launch tasks for Minecraft")
+ args = ap.parse_args()
+ print("[EWCONFIG] Executing pre-launch tasks for Minecraft")
+ print(f"[EWCONFIG] Minecraft directory: {MINECRAFT_DIR}")
+
+ # Copy the global waypoint dir on top of the base waypoint dir
+ print(f"[EWCONFIG] Copying {GLOBAL_WAYPOINT_DIR} to {WAYPOINT_BASE_DIR}")
+ WAYPOINT_BASE_DIR.mkdir(parents=True, exist_ok=True)
+ shutil.copytree(GLOBAL_WAYPOINT_DIR, WAYPOINT_BASE_DIR, dirs_exist_ok=True)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/configs/nautilus/scripts/Copy to web b/configs/nautilus/scripts/Copy to web
new file mode 100755
index 0000000..9207a42
--- /dev/null
+++ b/configs/nautilus/scripts/Copy to web
@@ -0,0 +1,36 @@
+#! /bin/bash
+set -e
+
+WEBSERVER_PATH=$HOME/www
+
+# If NAUTILUS_SCRIPT_SELECTED_FILE_PATHS is empty, error and exit
+if [ -z "$NAUTILUS_SCRIPT_SELECTED_FILE_PATHS" ]; then
+ notify-send "Copy to web" "No local files selected"
+ exit 1
+fi
+
+# For every file in NAUTILUS_SCRIPT_SELECTED_FILE_PATHS
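+# (Nautilus provides the selection as one absolute path per line)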
+echo "$NAUTILUS_SCRIPT_SELECTED_FILE_PATHS" | while read file; do
+ # Get the last segment of the path
+ filename=$(basename "$file")
+
+ # If the file comes from ~/Pictures/Screenshots, use a path in ~/$WEBSERVER_PATH/screenshots
+ if [[ "$file" == "$HOME/Pictures/Screenshots/"* ]]; then
+ OUTPUT_PATH="$WEBSERVER_PATH/screenshots/$filename"
+ RES_PATH="/screenshots/$filename"
+ mkdir -p "$WEBSERVER_PATH/screenshots"
+ else
+ OUTPUT_PATH="$WEBSERVER_PATH/$filename"
+ RES_PATH="/$filename"
+ fi
+
+ # Copy the file to the webserver
+ cp -r "$file" "$OUTPUT_PATH"
+
+ # Write the resource path to the clipboard
+ echo -n "$RES_PATH" | xsel -i -b
+
+done
+
+# Show a success message
+notify-send "Copy to web" "Files copied to ~/www"
diff --git a/configs/nautilus/scripts/Open in Video Trimmer b/configs/nautilus/scripts/Open in Video Trimmer
new file mode 100755
index 0000000..937a2f9
--- /dev/null
+++ b/configs/nautilus/scripts/Open in Video Trimmer
@@ -0,0 +1,4 @@
+#! /bin/bash
+set -e
+
+python3 ~/.config/ewconfig/scripts/video_trimmer
\ No newline at end of file
diff --git a/configs/nautilus/scripts/Open with USDView b/configs/nautilus/scripts/Open with USDView
new file mode 100755
index 0000000..67120d5
--- /dev/null
+++ b/configs/nautilus/scripts/Open with USDView
@@ -0,0 +1,5 @@
+#! /bin/sh
+set -e
+
+usdview $NAUTILUS_SCRIPT_SELECTED_FILE_PATHS
+
diff --git a/configs/nvim/.gitignore b/configs/nvim/.gitignore
new file mode 100644
index 0000000..aff9f31
--- /dev/null
+++ b/configs/nvim/.gitignore
@@ -0,0 +1,2 @@
+/spell
+
diff --git a/configs/nvim/init.vim b/configs/nvim/init.vim
new file mode 100644
index 0000000..cab8e5d
--- /dev/null
+++ b/configs/nvim/init.vim
@@ -0,0 +1,57 @@
+source ~/.vimrc
+
+" Configure the right-click menu
+if !exists('g:vscode')
+ aunmenu PopUp
+ vnoremenu PopUp.Cut "+x
+ vnoremenu PopUp.Copy "+y
+ anoremenu PopUp.Paste "+gP
+ vnoremenu PopUp.Paste "+P
+ vnoremenu PopUp.Delete "_x
+ nnoremenu PopUp.Select\ All> ggVG
+ vnoremenu PopUp.Select\ All> gg0oG$
+ inoremenu PopUp.Select\ All <C-Home><C-O>VG
+endif
+
+" Custom syntax highlighting
+au BufRead,BufNewFile *.usd set filetype=usda
+au BufRead,BufNewFile *.usda set filetype=usda
+autocmd FileType usda source ~/.config/nvim/third_party/usda-syntax/vim/usda.vim
+
+" Disable the gitgutter background
+let g:gitgutter_override_sign_column_highlight = 1
+highlight clear SignColumn
+
+" Make gitgutter update on file save
+if !exists('g:vscode')
+ autocmd BufWritePost * GitGutter
+endif
+
+" Enable Leap
+lua require('leap').add_default_mappings()
+
+" Enable Helix-style command suggestions
+"lua require('command-completion').setup()
+
+" Enable trailing space detection
+lua require('mini.trailspace').setup()
+
+" Configure floating window colours
+highlight Pmenu ctermbg=none ctermfg=white
+
+" VSCode-style comment toggling
+nnoremap <C-_> :Commentary<CR>
+vnoremap <C-_> :Commentary<CR>
+inoremap <C-_> <C-o>:Commentary<CR>
+
+" Custom functions
+function! MkdirAndWrite()
+ let dir = expand('%:p:h')
+ exec '!mkdir -p ' . dir
+ exec 'w'
+endfunction
+command Wmk call MkdirAndWrite()
+
+" LSP Support
+"lua require "lspconfig".rust_analyzer.setup {}
+"lua vim.keymap.set('n', 'e', vim.diagnostic.open_float)
diff --git a/configs/nvim/pack/airblade/start/vim-gitgutter b/configs/nvim/pack/airblade/start/vim-gitgutter
new file mode 160000
index 0000000..f7b9766
--- /dev/null
+++ b/configs/nvim/pack/airblade/start/vim-gitgutter
@@ -0,0 +1 @@
+Subproject commit f7b97666ae36c7b3f262f3190dbcd7033845d985
diff --git a/configs/nvim/pack/echasnovski/start/mini.trailspace b/configs/nvim/pack/echasnovski/start/mini.trailspace
new file mode 160000
index 0000000..60d0eca
--- /dev/null
+++ b/configs/nvim/pack/echasnovski/start/mini.trailspace
@@ -0,0 +1 @@
+Subproject commit 60d0eca6703b55285e33b0da00105cde50d188df
diff --git a/configs/nvim/pack/ggandor/start/leap.nvim b/configs/nvim/pack/ggandor/start/leap.nvim
new file mode 160000
index 0000000..5efe985
--- /dev/null
+++ b/configs/nvim/pack/ggandor/start/leap.nvim
@@ -0,0 +1 @@
+Subproject commit 5efe985cf68fac3b6a6dfe7a75fbfaca8db2af9c
diff --git a/configs/nvim/pack/jiangmiao/start/auto-pairs b/configs/nvim/pack/jiangmiao/start/auto-pairs
new file mode 160000
index 0000000..39f06b8
--- /dev/null
+++ b/configs/nvim/pack/jiangmiao/start/auto-pairs
@@ -0,0 +1 @@
+Subproject commit 39f06b873a8449af8ff6a3eee716d3da14d63a76
diff --git a/configs/nvim/pack/neovim/start/nvim-lspconfig b/configs/nvim/pack/neovim/start/nvim-lspconfig
new file mode 160000
index 0000000..8917d2c
--- /dev/null
+++ b/configs/nvim/pack/neovim/start/nvim-lspconfig
@@ -0,0 +1 @@
+Subproject commit 8917d2c830e04bf944a699b8c41f097621283828
diff --git a/configs/nvim/pack/smolck/start/command-completion.nvim b/configs/nvim/pack/smolck/start/command-completion.nvim
new file mode 160000
index 0000000..56c98f8
--- /dev/null
+++ b/configs/nvim/pack/smolck/start/command-completion.nvim
@@ -0,0 +1 @@
+Subproject commit 56c98f8d59a88ed96a80d43abca74a60ba31ea3a
diff --git a/configs/nvim/pack/tpope/start/commentary b/configs/nvim/pack/tpope/start/commentary
new file mode 160000
index 0000000..e87cd90
--- /dev/null
+++ b/configs/nvim/pack/tpope/start/commentary
@@ -0,0 +1 @@
+Subproject commit e87cd90dc09c2a203e13af9704bd0ef79303d755
diff --git a/configs/nvim/pack/tpope/start/repeat b/configs/nvim/pack/tpope/start/repeat
new file mode 160000
index 0000000..24afe92
--- /dev/null
+++ b/configs/nvim/pack/tpope/start/repeat
@@ -0,0 +1 @@
+Subproject commit 24afe922e6a05891756ecf331f39a1f6743d3d5a
diff --git a/configs/nvim/third_party/usda-syntax b/configs/nvim/third_party/usda-syntax
new file mode 160000
index 0000000..deb110f
--- /dev/null
+++ b/configs/nvim/third_party/usda-syntax
@@ -0,0 +1 @@
+Subproject commit deb110f0224235fcc19325b20885e566f911f84b
diff --git a/configs/python/python_startup.py b/configs/python/python_startup.py
new file mode 100644
index 0000000..0c4a03c
--- /dev/null
+++ b/configs/python/python_startup.py
@@ -0,0 +1,95 @@
+"""Python Startup file. Used to customize the Python REPL"""
+import sys
+import os
+
+# Global stuff
+IS_IN_TINKER_MODE = bool(os.environ.get("PYTHON_TINKER_MODE"))
+COLOR_ALLOWED = not bool(os.environ.get("NO_COLOR"))
+
+
+def colored_string(text: str, color: str) -> str:
+ if COLOR_ALLOWED:
+ return "\033[" + color + "m" + text + "\033[0m"
+ else:
+ return text
+
+
+# Configure the prompt
+class Prompt:
+ def __init__(self):
+ self.prompt = colored_string(">>> ", "36")
+
+ def __str__(self):
+ return self.prompt
+
+
+class MultiLinePrompt:
+ def __str__(self):
+ return colored_string("... ", "33")
+ # return " "
+
+
+# Hook up the prompts
+sys.ps1 = Prompt()
+sys.ps2 = MultiLinePrompt()
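+# NOTE: sys.ps1 / sys.ps2 accept arbitrary objects; the REPL calls str() on them
+# each time a prompt is drawn, which is why classes are used instead of plain strings.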
+
+# "Tinker mode" - automatically import common things
+if IS_IN_TINKER_MODE:
+ print(
+ colored_string(
+ "Running in tinker mode. Additional modules are available.", "33"
+ )
+ )
+
+ # Basics
+ import time
+ import json
+ from pathlib import Path
+ from dataclasses import dataclass
+ from typing import (
+ List,
+ Dict,
+ Tuple,
+ Set,
+ Optional,
+ Union,
+ Any,
+ Callable,
+ Iterable,
+ Generator,
+ )
+ from pprint import pprint
+ from datetime import datetime
+ from textwrap import dedent
+ from base64 import b64encode, b64decode
+
+ # Math stuff
+ try:
+ import numpy as np
+
+ np.set_printoptions(suppress=True)
+ _vec = lambda *fields: np.array([*fields])
+ pi = np.pi
+ except ImportError:
+ pass
+ try:
+ from pyquaternion import Quaternion
+ except ImportError:
+ pass
+ try:
+ import matplotlib.pyplot as plt
+ except ImportError:
+ pass
+
+
+# If we aren't in tinker mode, un-import sys and os
+if not IS_IN_TINKER_MODE:
+ del sys
+ del os
+
+# Clean up other stuff
+del IS_IN_TINKER_MODE
+del COLOR_ALLOWED
+del colored_string
+del Prompt
+del MultiLinePrompt
\ No newline at end of file
diff --git a/configs/rofi/config.rasi b/configs/rofi/config.rasi
new file mode 100644
index 0000000..4315809
--- /dev/null
+++ b/configs/rofi/config.rasi
@@ -0,0 +1,4 @@
+configuration {
+ show-icons: false;
+}
+@theme "dmenu"
\ No newline at end of file
diff --git a/configs/shells/bash/.bashrc b/configs/shells/bash/.bashrc
new file mode 100644
index 0000000..b7782d2
--- /dev/null
+++ b/configs/shells/bash/.bashrc
@@ -0,0 +1,20 @@
+# This is a somewhat hacky bashrc that is used to provide some of the conveniences from my zshrc on machines that I can't get zsh on
+export EWCONFIG_ROOT="$HOME/.config/ewconfig"
+
+# Show some host info
+. $EWCONFIG_ROOT/configs/shells/bash/info.sh
+
+# Load macros
+. $EWCONFIG_ROOT/configs/shells/bash/macros.sh
+
+# I always want my ~/bin to be in my PATH
+export PATH="$HOME/bin:$PATH"
+export PATH="$EWCONFIG_ROOT/scripts:$PATH"
+export PATH="$HOME/.local/bin:$PATH"
+
+# I want to be able to load my custom python modules
+export PYTHONPATH="$EWCONFIG_ROOT/python_modules:$PYTHONPATH"
+export PYTHONSTARTUP="$EWCONFIG_ROOT/configs/python/python_startup.py"
+
+# A basic prompt to display user@host dir sign
+export PS1="(${PS1_CTX:-bash}) \[\e[0;32m\]\u@\h \[\e[0;36m\]\w \[\e[0;36m\]\$ \[\e[0m\]"
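+# NOTE: PS1_CTX can be exported before starting a nested shell (e.g. inside a
+# container or chroot) to change the label shown in the leading parentheses.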
diff --git a/configs/shells/bash/info.sh b/configs/shells/bash/info.sh
new file mode 100644
index 0000000..dd42aed
--- /dev/null
+++ b/configs/shells/bash/info.sh
@@ -0,0 +1,33 @@
+
+# Define red and green based on the shell
+if [ -n "$BASH_VERSION" ]; then
+ red='\033[0;31m'
+ green='\033[0;32m'
+ reset_color='\033[0m'
+ elif [ -n "$ZSH_VERSION" ]; then
+ red="$fg[red]"
+ green="$fg[green]"
+fi
+
+# Different OSes have different ways of displaying info
+if [ $(uname -s | grep -c BSD) -gt 0 ]; then # BSD
+ echo -e "${green}Platform:$reset_color $(uname -s) $(uname -r) $(uname -p)"
+
+ elif [ $(uname -o | grep -c Msys) -gt 0 ]; then # Windows
+ echo -e "${green}Platform:$reset_color $(uname -o) $(uname -r)"
+
+else # Linux-y things
+ echo -e "${green}Platform:$reset_color $(uname -o) $(uname -r)"
+ echo -e "${green}Uptime:$reset_color $(uptime -p)"
+fi
+
+# Determine if $EWCONFIG_ROOT contains uncommitted changes
+# Skip this if on Windows
+if [ -d $EWCONFIG_ROOT/.git ]; then
+ if [ $(uname -o | grep -c Msys) -eq 0 ]; then
+ if [ -n "$(git -C $EWCONFIG_ROOT status --porcelain)" ]; then
+ echo -e "${red}ewconfig contains uncommitted changes$reset_color"
+ fi
+ fi
+fi
+
diff --git a/configs/shells/bash/macros.sh b/configs/shells/bash/macros.sh
new file mode 100644
index 0000000..a030b2a
--- /dev/null
+++ b/configs/shells/bash/macros.sh
@@ -0,0 +1,283 @@
+# If ls has `--color` support
+if ls --color > /dev/null 2>&1; then
+ alias ls="ls --color=auto"
+fi
+
+# Main aliases
+alias ll="ls -l"
+alias la="ls -a"
+alias :q="exit"
+alias :wq="exit"
+alias cls=clear
+alias bashreload="source ~/.bashrc"
+alias wg-easykeys="wg genkey | tee >(wg pubkey)"
+alias nvim-tmp='nvim $(mktemp)'
+alias flush-dns="sudo systemd-resolve --flush-caches"
+alias showsizes="du -h --max-depth=1"
+alias lsgrep="ls | grep"
+alias sheridan-rdp='firefox --new-window "ext+container:name=College&url=https://client.wvd.microsoft.com/arm/webclient/index.html"'
+alias git-diff-nvim="git diff | nvim -R -d -c 'set filetype=diff' -"
+alias yk-totp="ykman oath accounts code"
+alias flush-dns-cache="sudo systemd-resolve --flush-caches"
+alias which-ls="ls -la $(which ls)"
+alias rdns="dig +short -x"
+alias ufw-status="sudo ufw status numbered"
+alias genuuid="python -c 'import uuid; print(uuid.uuid4())'"
+alias clipboard="xclip -selection clipboard"
+alias filesize="du -hs"
+alias arp-watch="sudo tcpdump -nn -tt -q \"arp and arp[6:2] == 2\""
+alias snvim="sudoedit"
+
+# WHOIS macros
+alias whois-afrinic="whois -h whois.afrinic.net"
+alias whois-altdb="whois -h whois.altdb.net"
+alias whois-aoltw="whois -h whois.aoltw.net"
+alias whois-ampr="whois -h whois.ampr.org"
+alias whois-apnic="whois -h whois.apnic.net"
+alias whois-arin="whois -h rr.arin.net"
+alias whois-bell="whois -h whois.in.bell.ca"
+alias whois-bboi="whois -h irr.bboi.net"
+alias whois-bgptools="whois -h bgp.tools"
+alias whois-canarie="whois -h whois.canarie.ca"
+alias whois-epoch="whois -h whois.epoch.net"
+alias whois-jpirr="whois -h jpirr.nic.ad.jp"
+alias whois-lacnic="whois -h irr.lacnic.net"
+alias whois-level3="whois -h rr.level3.net"
+alias whois-nestegg="whois -h whois.nestegg.net"
+alias whois-panix="whois -h rrdb.access.net"
+alias whois-radb="whois -h whois.radb.net"
+alias whois-reach="whois -h rr.telstraglobal.net"
+alias whois-ripe="whois -h whois.ripe.net"
+
+# Neo-aliases
+if [ -x "$(command -v nvim)" ]; then alias vim="nvim"; fi
+if [ -x "$(command -v neomutt)" ]; then alias mutt="neomutt"; fi
+
+# If python exists, configure an alias for python3 if needed
+if [ -x "$(command -v python)" ]; then
+ # If `python --version` starts with `Python 3`
+ if [[ $(python --version) == Python\ 3* ]]; then
+ # If we don't have python3 in our path
+ if ! command -v python3 &> /dev/null; then
+ # Make an alias for python3
+ alias python3=python
+ fi
+ fi
+fi
+
+# If we are running in a studio environment
+if [ ! -z "$EWP_IN_GURU_ENVIRONMENT" ]; then
+ alias guru_launcher3="python $GURU_PYTHON_ROOT/env/guru_launcher3.py"
+ alias cd-dev="cd /s/development/epratten"
+fi
+
+# Kill via pgrep
+nkill() {
+ if [ $# != 1 ]; then
+ echo "Usage: nkill "
+ else
+ kill -9 $(pgrep $1)
+ fi
+}
+
+# Makes a directory, then moves into it
+mkcd() {
+ if [ $# != 1 ]; then
+ echo "Usage: mkcd "
+ else
+ mkdir -p $1 && cd $1
+ fi
+}
+
+# Sources a .env
+source_env() {
+ env=${1:-.env}
+ [ ! -f "${env}" ] && { echo "Env file ${env} doesn't exist"; return 1; }
+ eval $(sed -e '/^\s*$/d' -e '/^\s*#/d' -e 's/=/="/' -e 's/$/"/' -e 's/^/export /' "${env}")
+}
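+# Examples: "source_env" loads ./.env, "source_env some.env" loads a specific file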
+
+# Auto-extract anything
+extract() {
+ if [ -f $1 ]; then
+ case $1 in
+ *.tar.bz2) tar xvjf $1 ;;
+ *.tar.gz) tar xvzf $1 ;;
+ *.bz2) bunzip2 $1 ;;
+ *.rar) unrar x $1 ;;
+ *.gz) gunzip $1 ;;
+ *.tar) tar xvf $1 ;;
+ *.tbz2) tar xvjf $1 ;;
+ *.tgz) tar xvzf $1 ;;
+ *.zip) unzip $1 ;;
+ *.Z) uncompress $1 ;;
+ *.7z) 7z x $1 ;;
+ *.tar.zst) tar --use-compress-program=unzstd -xvf $1 ;;
+ *.zst) zstd -d $1 ;;
+ *) echo "don't know how to extract '$1'..." ;;
+ esac
+ else
+ echo "'$1' is not a valid file!"
+ fi
+}
+
+# Generate a password
+genpass() {
+ if [ $# != 1 ]; then
+ echo "Usage: genpass "
+ else
+ echo $(openssl rand -base64 $1 | tr -d "\n")
+ fi
+
+}
+
+# Sign a file with an SSH key
+ssh-sign(){
+ if [ $# != 2 ]; then
+ echo "Usage: ssh-sign "
+ else
+ if [ -f $2 ]; then
+ cat $2 | ssh-keygen -Y sign -f $1 -n file -
+ else
+ >&2 echo "File not found: $2"
+ fi
+ fi
+}
+
+# Verify a file, using the ~/.ssh/allowed_signers file
+ssh-verify(){
+ if [ $# != 3 ]; then
+ echo "Usage: ssh-verify "
+ else
+ ssh-keygen -Y verify -f ~/.ssh/allowed_signers -n file -I $1 -s $2 < $3
+ fi
+}
+
+# Fully restart a wireguard link
+wg-restart() {
+ if [ $# != 1 ]; then
+ echo "Usage: wg-restart "
+ else
+ wg-quick down $1 || true;
+ wg-quick up $1
+ fi
+}
+
+# Reload a wireguard link without stopping it
+wg-reload() {
+ if [ $# != 1 ]; then
+ echo "Usage: wg-reload "
+ else
+ sudo wg syncconf $1 <(sudo wg-quick strip $1)
+ fi
+}
+
+# Edit a wireguard config file
+wg-edit() {
+ if [ $# != 1 ]; then
+ echo "Usage: wg-edit "
+ else
+ sudoedit /etc/wireguard/$1.conf
+ fi
+}
+
+# Print a wireguard config file
+wg-cat() {
+ if [ $# != 1 ]; then
+ echo "Usage: wg-cat "
+ else
+ sudo cat /etc/wireguard/$1.conf
+ fi
+}
+
+# Updates ewconfig
+ewconfig-pull() {
+ cwd=$(pwd)
+ cd ~/.config/ewconfig
+ git pull ewp master
+ cd $cwd
+}
+
+# Updates the ewconfig on machines that don't have git
+ewconfig-pull-zip(){
+ cwd=$(pwd)
+ # If $EWCONFIG_ROOT/.git exists, don't let the user run this
+ if [ -d $EWCONFIG_ROOT/.git ]; then
+ echo "You can't run this command when ~/.config/ewconfig is a git repo!"
+ return 1
+ fi
+
+ # Download the latest zip
+ cd ~/Downloads
+ curl -L https://ewp.fyi/config.zip -o ewconfig.zip
+ rm -rf ~/.config/ewconfig
+ unzip ewconfig.zip
+ mv ewconfig-master ~/.config/ewconfig
+ rm ewconfig.zip
+
+ # Return to the original directory
+ cd $cwd
+}
+
+# Temporairly hop to the ewconfig directory to run a command
+ewconfig-run() {
+ cwd=$(pwd)
+ cd ~/.config/ewconfig
+ "$@"
+ cd $cwd
+}
+
+# Re-run the install script from anywhere
+ewconfig-reinstall() {
+ # Require an argument (linux, windows)
+ if [ $# != 1 ]; then
+ echo "Usage: ewconfig-reinstall "
+ return 1
+ fi
+
+ # Execute through ewconfig-run
+ ewconfig-run sh ./install-$1.sh
+}
+
+# Define a function to emulate gh
+gh-emulated() {
+ if [ $# != 3 ]; then
+ echo "You don't have gh installed. Emulating its functionality."
+ echo "Usage: gh repo clone /"
+ else
+ git clone https://github.com/$3
+ fi
+}
+
+# Only if `gh` is not installed
+if ! command -v gh &> /dev/null; then
+ alias gh=gh-emulated
+fi
+
+# Convert an SVG to a PNG
+svg2png() {
+ if [ $# != 1 ]; then
+ echo "Usage: svg2png "
+ else
+ inkscape "$1" --export-type=png --export-filename="$1.png"
+ fi
+}
+
+# Get the AS Path to an IP
+aspath() {
+ # There must be at least one argument (can be more)
+ if [ $# -lt 1 ]; then
+ echo "Usage: aspath <host> [args]"
+ else
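+ # mtr -z reports each hop's AS number; the pipeline below strips the header,
+ # keeps the ASN column, drops unknown hops (AS???), de-duplicates, removes the
+ # "AS" prefix, and joins the remaining numbers with arrows.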
+ mtr $@ -z -rw -c1 -G0.25 | tail -n +3 | awk '{print $2}' | grep -v AS\?\?\? | uniq | cut -c 3- | tr '\n' ',' | sed 's/,/ -> /g' | rev | cut -c 5- | rev
+ fi
+}
+
+# Get the AS Path to an IP (include unknown hops)
+aspath-long() {
+ # There must be at least one argument (can be more)
+ if [ $# -lt 1 ]; then
+ echo "Usage: aspath-long <host> [args]"
+ else
+ mtr $@ -z -rw -c1 -G0.25 | tail -n +3 | awk '{print $2}' | uniq | cut -c 3- | tr '\n' ',' | sed 's/,/ -> /g' | rev | cut -c 5- | rev
+ fi
+}
diff --git a/configs/shells/zsh/.zshrc b/configs/shells/zsh/.zshrc
new file mode 100644
index 0000000..eae7fec
--- /dev/null
+++ b/configs/shells/zsh/.zshrc
@@ -0,0 +1,58 @@
+
+# There are some important env vars that need to exist
+export EWCONFIG_ROOT="$HOME/.config/ewconfig"
+
+# Load my custom prompt and macros
+. $EWCONFIG_ROOT/configs/shells/zsh/prompt.sh
+. $EWCONFIG_ROOT/configs/shells/zsh/macros.sh
+. $EWCONFIG_ROOT/configs/shells/zsh/keybinds.sh
+. $EWCONFIG_ROOT/configs/shells/zsh/autocomplete.sh
+
+# Load per-host configuration
+if [ -f $EWCONFIG_ROOT/configs/shells/zsh/by_host/$HOSTNAME.sh ]; then
+ . $EWCONFIG_ROOT/configs/shells/zsh/by_host/$HOSTNAME.sh
+fi
+
+# Show some host info
+. $EWCONFIG_ROOT/configs/shells/bash/info.sh
+
+# I always want my ~/bin to be in my PATH
+export PATH="$HOME/bin:$PATH"
+export PATH="$EWCONFIG_ROOT/scripts:$PATH"
+export PATH="$HOME/.local/bin:$PATH"
+
+# Make sure libs can be found
+export LD_LIBRARY_PATH="/usr/local/lib:$LD_LIBRARY_PATH"
+export LD_LIBRARY_PATH="/usr/local/lib64:$LD_LIBRARY_PATH"
+
+# I want to be able to load my custom python modules
+export PYTHONPATH="$EWCONFIG_ROOT/python_modules:$PYTHONPATH"
+export PYTHONSTARTUP="$EWCONFIG_ROOT/configs/python/python_startup.py"
+
+# Configure a sane default editor
+if type -p nvim > /dev/null; then
+ export EDITOR="nvim"
+elif type -p vim > /dev/null; then
+ export EDITOR="vim"
+elif type -p vi > /dev/null; then
+ export EDITOR="vi"
+elif type -p nano > /dev/null; then
+ export EDITOR="nano"
+fi
+
+# If we have neovim, use it as the manpage viewer
+if type -p nvim > /dev/null; then
+ export MANPAGER="nvim +Man!"
+ export MANWIDTH=80
+fi
+
+# SDKMAN!
+export SDKMAN_DIR="$HOME/.sdkman"
+[[ -s "$HOME/.sdkman/bin/sdkman-init.sh" ]] && source "$HOME/.sdkman/bin/sdkman-init.sh"
+
+# Flutter
+[[ -s "$HOME/pkg/flutter/bin" ]] && export PATH="$HOME/pkg/flutter/bin:$PATH"
+
+# Rye
+[[ -s "$HOME/.rye/env" ]] && source "$HOME/.rye/env"
+. "$HOME/.cargo/env"
diff --git a/configs/shells/zsh/autocomplete.sh b/configs/shells/zsh/autocomplete.sh
new file mode 100644
index 0000000..cc501ed
--- /dev/null
+++ b/configs/shells/zsh/autocomplete.sh
@@ -0,0 +1,46 @@
+# Allow programs to write their own autocomplete functions to ~/.zfunc
+mkdir -p ~/.zfunc
+fpath+=~/.zfunc
+
+# Make all shells append to history file instantly
+setopt INC_APPEND_HISTORY
+
+# If we have rustup, it can be used to generate completions for itself and cargo
+if type -p rustup >/dev/null; then
+ # Only generate if the files don't already exist
+ if [[ ! -f ~/.zfunc/_rustup ]]; then
+ rustup completions zsh > ~/.zfunc/_rustup
+ fi
+ if [[ ! -f ~/.zfunc/_cargo ]]; then
+ rustup completions zsh cargo > ~/.zfunc/_cargo
+ fi
+fi
+
+# Enable auto-complete
+autoload -Uz compinit && compinit
+
+# Handles case-insensitive completion
+zstyle ':completion:*' matcher-list 'm:{a-z}={A-Za-z}'
+
+# Configure command history
+HISTFILE=~/.histfile
+HISTSIZE=100000
+SAVEHIST=100000
+
+# Ignore duplicates in history search, and don't write them either
+setopt HIST_FIND_NO_DUPS
+setopt HIST_IGNORE_ALL_DUPS
+
+# Ignore commands starting with a space
+setopt HIST_IGNORE_SPACE
+
+# Allow up arrow to be used to go back in history based on current line contents
+autoload -U up-line-or-beginning-search
+autoload -U down-line-or-beginning-search
+zle -N up-line-or-beginning-search
+zle -N down-line-or-beginning-search
+bindkey "^[[A" up-line-or-beginning-search # Up
+bindkey "^[[B" down-line-or-beginning-search # Down
+bindkey "^[OA" up-line-or-beginning-search # Up over SSH connection
+bindkey "^[OB" down-line-or-beginning-search # Down over SSH connection
+
diff --git a/configs/shells/zsh/keybinds.sh b/configs/shells/zsh/keybinds.sh
new file mode 100644
index 0000000..9459345
--- /dev/null
+++ b/configs/shells/zsh/keybinds.sh
@@ -0,0 +1,26 @@
+### ctrl+arrows
+bindkey "\e[1;5C" forward-word
+bindkey "\e[1;5D" backward-word
+# urxvt
+bindkey "\eOc" forward-word
+bindkey "\eOd" backward-word
+
+### ctrl+delete
+bindkey "\e[3;5~" kill-word
+# urxvt
+bindkey "\e[3^" kill-word
+
+### ctrl+backspace
+bindkey '^H' backward-kill-word
+
+### ctrl+shift+delete
+bindkey "\e[3;6~" kill-line
+# urxvt
+bindkey "\e[3@" kill-line
+
+# Home and end
+bindkey "^[[H" beginning-of-line
+bindkey "^[[F" end-of-line
+
+# Delete
+bindkey "^[[3~" delete-char
diff --git a/configs/shells/zsh/macros.sh b/configs/shells/zsh/macros.sh
new file mode 100644
index 0000000..22a53f6
--- /dev/null
+++ b/configs/shells/zsh/macros.sh
@@ -0,0 +1,14 @@
+# We are compatible with bash, so first load the bash-specific macros
+. $EWCONFIG_ROOT/configs/shells/bash/macros.sh
+
+# Aliases
+alias zshreload="source ~/.zshrc"
+
+# Search for a process
+proc-grep() {
+ if [ $# != 1 ]; then
+ echo "Usage: proc-grep "
+ else
+ ps aux | { head -1; grep $1 }
+ fi
+}
diff --git a/configs/shells/zsh/prompt.sh b/configs/shells/zsh/prompt.sh
new file mode 100644
index 0000000..d107017
--- /dev/null
+++ b/configs/shells/zsh/prompt.sh
@@ -0,0 +1,48 @@
+# This is some kind of dark magic.
+# I have no memory of what's going on here, but this has been my config since 2015-ish, so it shall not be touched.
+# This was originally written for crosh, so that may be part of the problem...
+
+autoload -U colors && colors
+NEWLINE=$'\n'
+USER_ICON="$"
+
+# Use colors to signal the current connection / user privs
+if [[ $(id -u) = 0 ]]; then
+ HOST_COLOR="red"
+ USER_ICON="#"
+elif [ -n "$SSH_CLIENT" ] || [ -n "$SSH_TTY" ]; then
+ HOST_COLOR="yellow"
+elif [ "$EWCONFIG_IN_DOCKER" = "1" ]; then
+ HOST_COLOR="blue"
+else
+ HOST_COLOR="green"
+fi
+
+# Clear the prompt
+export PROMPT=""
+
+# If we are *NOT* in Termux, show the host and username
+if ! command -v termux-setup-storage > /dev/null 2>&1; then
+ export PROMPT="%{$fg[$HOST_COLOR]%}%n@%M "
+fi
+
+# Add the common prompt parts
+export PROMPT="${PROMPT}%{$fg[cyan]%}%~ ${USER_ICON} %{$reset_color%}"
+setopt prompt_subst
+autoload -Uz vcs_info
+zstyle ':vcs_info:*' actionformats \
+ '%F{5}(%f%s%F{5})%F{3}-%F{5}[%F{2}%b%F{3}|%F{1}%a%F{5}]%f '
+zstyle ':vcs_info:*' formats \
+ '%F{5}(%f%s%F{5})%F{3}-%F{5}[%F{2}%b%F{5}]%f '
+zstyle ':vcs_info:(sv[nk]|bzr):*' branchformat '%b%F{1}:%F{3}%r'
+
+zstyle ':vcs_info:*' enable git cvs svn
+
+# or use pre_cmd, see man zshcontrib
+vcs_info_wrapper() {
+ vcs_info
+ if [ -n "$vcs_info_msg_0_" ]; then
+ echo "%{$fg[grey]%}${vcs_info_msg_0_}%{$reset_color%}$del"
+ fi
+}
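+# Right prompt: current time (%T), VCS info, and the last command's exit status (%?)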
+export RPROMPT=$'%T $(vcs_info_wrapper)%?'
diff --git a/configs/ssh/allowed_signers b/configs/ssh/allowed_signers
new file mode 100644
index 0000000..0f1c41e
--- /dev/null
+++ b/configs/ssh/allowed_signers
@@ -0,0 +1,4 @@
+evan@ewpratten.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMbNW3x0Cx04MybxQo5FptZhdjiFJjKITtq67KqrDXAN+jepJwdIG6TUJzuJddztuYIsvPAoQPpiQVSPyC8y51Y=
+evan@ewpratten.com sk-ssh-ed25519@openssh.com AAAAGnNrLXNzaC1lZDI1NTE5QG9wZW5zc2guY29tAAAAIAkdmKF1cYQTW7cfK7TYC5iVBsAg5g3SRJqlqo2NixHdAAAABHNzaDo=
+evan@ewpratten.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGBaSKoWYuR43fxRPy31P/X/2Ri2hYUZTjKiLBRDoa1F
+evan@ewpratten.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGEqeWe+pVp3rTsmHr9v5Qcb+niHXihJxwVKcc7p6EXz
diff --git a/configs/ssh/config b/configs/ssh/config
new file mode 100644
index 0000000..606ad98
--- /dev/null
+++ b/configs/ssh/config
@@ -0,0 +1,116 @@
+# vim: nospell
+# Global Rules
+Host * !*.github.com !github.com
+ IdentityFile %d/.ssh/id_ed25519_sk_rk_yk20572395
+ IdentityFile %d/.ssh/id_ed25519
+ IdentityFile %d/.ssh/id_rsa
+ # VisualHostKey yes
+ #VerifyHostKeyDNS ask
+
+
+# Github SSH adapter for restricted networks
+Host github.com gist.github.com
+ HostName ssh.github.com
+ Port 443
+ # This solves a VSCode bug
+ IdentityFile %d/.ssh/id_ed25519
+ IdentityFile %d/.ssh/id_ed25519_sk_rk_yk20572395
+ # IdentityFile %d/.ssh/id_rsa
+
+# Home network
+Host unifi
+ HostName dreammachinepro.local
+ User root
+ PreferredAuthentications password
+ PubkeyAuthentication no
+
+Host ewpratten-steamdeck
+ HostName ewpratten-steamdeck.local
+ User deck
+ ForwardX11 yes
+
+Host 10.80.0.218
+ User root
+ PreferredAuthentications password
+ PubkeyAuthentication no
+ PubkeyAcceptedKeyTypes +ssh-rsa
+ HostKeyAlgorithms=+ssh-rsa
+
+# Default hostnames I may encounter in the wild
+Host openrepeater.local
+ HostName openrepeater.local
+ User root
+ StrictHostKeyChecking no
+
+Host raspberrypi.local
+ StrictHostKeyChecking no
+ User pi
+
+Host 192.168.8.1
+ User root
+ PreferredAuthentications password
+ PubkeyAuthentication no
+ PubkeyAcceptedKeyTypes +ssh-rsa
+ HostKeyAlgorithms=+ssh-rsa
+
+# Raider Robotics
+Host 10.50.24.2
+ StrictHostKeyChecking no
+ User admin
+
+# Sheridan College
+Host db6.fast.sheridanc.on.ca
+ User user03
+Host dbr.fast.sheridanc.on.ca
+ User DataIntegrator
+Host *.fast.sheridanc.on.ca
+ ProxyJump prattene@atlas.sheridanc.on.ca
+Host atlas.sheridanc.on.ca
+ User prattene
+Host *.sheridanc.on.ca
+ User prattene
+ RequestTTY yes
+ PubkeyAcceptedKeyTypes +ssh-rsa
+ HostKeyAlgorithms +ssh-rsa
+
+# NLNOG
+Host *.ring.nlnog.net
+ User ewpratten
+ IdentitiesOnly yes
+
+# SDF.org
+Host sdf.org *.sdf.org
+ User ewpratten
+Host sdf
+ HostName sdf.org
+Host otaku
+ HostName otaku.sdf.org
+
+# Guru
+Host *.gurustudio.com
+ User "guru-domain\\epratten"
+Host td-prod td-prod2 td-prod3 td-prod4
+ User guru
+Host w6421
+ User "guru-domain\\epratten"
+ #RequestTTY yes
+ # RemoteCommand "C:\Program Files\Git\bin\bash.exe" --login
+
+# Personal Infra
+Host oci-arm
+ HostName oci-arm.vpn.ewp.fyi
+ User ubuntu
+
+# Pratten Machines
+Host warren-desktop
+ HostName workstation.warren.vpn
+
+Host gianna-desktop
+ HostName workstation.gianna.vpn
+
+# Various Aliases
+Host desktop
+ HostName ewpratten-desktop.home
+
+Host laptop
+ HostName ewpratten-laptop.home
diff --git a/configs/tabset/devices/huion-kamvas-13/name b/configs/tabset/devices/huion-kamvas-13/name
new file mode 100755
index 0000000..3ed584f
--- /dev/null
+++ b/configs/tabset/devices/huion-kamvas-13/name
@@ -0,0 +1 @@
+Huion Kamvas 13
\ No newline at end of file
diff --git a/configs/tabset/devices/huion-kamvas-13/peripherals b/configs/tabset/devices/huion-kamvas-13/peripherals
new file mode 100755
index 0000000..ea78a73
--- /dev/null
+++ b/configs/tabset/devices/huion-kamvas-13/peripherals
@@ -0,0 +1,2 @@
+Tablet Monitor stylus
+Tablet Monitor Pad pad
\ No newline at end of file
diff --git a/configs/tabset/devices/huion-kamvas-13/profiles/blender.sh b/configs/tabset/devices/huion-kamvas-13/profiles/blender.sh
new file mode 100755
index 0000000..2d54072
--- /dev/null
+++ b/configs/tabset/devices/huion-kamvas-13/profiles/blender.sh
@@ -0,0 +1,18 @@
+# Top three buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 1 "key +ctrl +z -z -ctrl"
+xsetwacom --set "Tablet Monitor Pad pad" Button 2 "key +ctrl +tab -tab -ctrl"
+xsetwacom --set "Tablet Monitor Pad pad" Button 3 "key x"
+
+# Middle two buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 8 "key shift"
+xsetwacom --set "Tablet Monitor Pad pad" Button 9 "key ctrl"
+
+# Bottom three buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 10 "key 1"
+xsetwacom --set "Tablet Monitor Pad pad" Button 11 "key 3"
+xsetwacom --set "Tablet Monitor Pad pad" Button 12 "key 0"
+
+# Pen buttons
+xsetwacom set "Tablet Monitor stylus" "Button" "1" "button +1 "
+xsetwacom set "Tablet Monitor stylus" "Button" "2" "button +2 "
+xsetwacom set "Tablet Monitor stylus" "Button" "3" "button +3 "
\ No newline at end of file
diff --git a/configs/tabset/devices/huion-kamvas-13/profiles/default.sh b/configs/tabset/devices/huion-kamvas-13/profiles/default.sh
new file mode 120000
index 0000000..b982e46
--- /dev/null
+++ b/configs/tabset/devices/huion-kamvas-13/profiles/default.sh
@@ -0,0 +1 @@
+blender.sh
\ No newline at end of file
diff --git a/configs/tabset/devices/huion-kamvas-13/profiles/disabled.sh b/configs/tabset/devices/huion-kamvas-13/profiles/disabled.sh
new file mode 100755
index 0000000..46e6ae1
--- /dev/null
+++ b/configs/tabset/devices/huion-kamvas-13/profiles/disabled.sh
@@ -0,0 +1,18 @@
+# Top three buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 1 "button 0"
+xsetwacom --set "Tablet Monitor Pad pad" Button 2 "button 0"
+xsetwacom --set "Tablet Monitor Pad pad" Button 3 "button 0"
+
+# Middle two buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 8 "button 0"
+xsetwacom --set "Tablet Monitor Pad pad" Button 9 "button 0"
+
+# Bottom three buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 10 "button 0"
+xsetwacom --set "Tablet Monitor Pad pad" Button 11 "button 0"
+xsetwacom --set "Tablet Monitor Pad pad" Button 12 "button 0"
+
+# Pen buttons
+xsetwacom set "Tablet Monitor stylus" "Button" "1" "button +1 "
+xsetwacom set "Tablet Monitor stylus" "Button" "2" "button +2 "
+xsetwacom set "Tablet Monitor stylus" "Button" "3" "button +3 "
diff --git a/configs/tabset/devices/huion-kamvas-13/profiles/gimp.sh b/configs/tabset/devices/huion-kamvas-13/profiles/gimp.sh
new file mode 100755
index 0000000..34732ad
--- /dev/null
+++ b/configs/tabset/devices/huion-kamvas-13/profiles/gimp.sh
@@ -0,0 +1,18 @@
+# Top three buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 1 "key +ctrl +z -z -ctrl"
+xsetwacom --set "Tablet Monitor Pad pad" Button 2 "key +ctrl +shift +a -a -shift -ctrl"
+xsetwacom --set "Tablet Monitor Pad pad" Button 3 "key del"
+
+# Middle two buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 8 "key shift"
+xsetwacom --set "Tablet Monitor Pad pad" Button 9 "key ctrl"
+
+# Bottom three buttons
+xsetwacom --set "Tablet Monitor Pad pad" Button 10 "key +shift +e -e -shift"
+xsetwacom --set "Tablet Monitor Pad pad" Button 11 "key p"
+xsetwacom --set "Tablet Monitor Pad pad" Button 12 "key f"
+
+# Pen buttons
+xsetwacom set "Tablet Monitor stylus" "Button" "1" "button +1 "
+xsetwacom set "Tablet Monitor stylus" "Button" "2" "button +2 "
+xsetwacom set "Tablet Monitor stylus" "Button" "3" "button +3 "
diff --git a/configs/termux/termux.properties b/configs/termux/termux.properties
new file mode 100644
index 0000000..9f8a262
--- /dev/null
+++ b/configs/termux/termux.properties
@@ -0,0 +1,7 @@
+# Terminal behaviour
+bell-character=vibrate
+
+# Keyboard
+extra-keys-style = default
+extra-keys = [['ESC', 'TAB', 'CTRL', 'ALT', 'LEFT', 'DOWN', 'UP', 'RIGHT']]
+
diff --git a/configs/tmux/.tmux.conf b/configs/tmux/.tmux.conf
new file mode 100644
index 0000000..c8a0605
--- /dev/null
+++ b/configs/tmux/.tmux.conf
@@ -0,0 +1 @@
+set -g mouse on
\ No newline at end of file
diff --git a/configs/user-tmpfiles.d/discord-rpc.conf b/configs/user-tmpfiles.d/discord-rpc.conf
new file mode 100644
index 0000000..031fc68
--- /dev/null
+++ b/configs/user-tmpfiles.d/discord-rpc.conf
@@ -0,0 +1 @@
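+# Expose the Flatpak Discord IPC socket at the standard location in the user
+# runtime dir so local rich-presence clients can find it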
+L %t/discord-ipc-0 - - - - app/com.discordapp.Discord/discord-ipc-0
\ No newline at end of file
diff --git a/configs/vim/.vimrc b/configs/vim/.vimrc
new file mode 100644
index 0000000..d411eae
--- /dev/null
+++ b/configs/vim/.vimrc
@@ -0,0 +1,80 @@
+" Syntax highlighting
+syntax on
+
+" Enable modeline
+set modeline
+
+" Enable mouse usage
+set mouse=a
+
+" Tab size
+set tabstop=4
+set shiftwidth=4
+set expandtab
+
+" VSCode-style selection indenting
+nnoremap <Tab> >>_
+nnoremap <S-Tab> <<_
+inoremap <S-Tab> <C-D>
+vnoremap <Tab> >gv
+vnoremap <S-Tab> <gv
+inoremap lcw
+nnoremap cwl
+
+" Force VIM to use system clipboard
+set clipboard=unnamedplus
+
+" Enable per-project vimrc files
+set exrc
+set secure
+
+" Enable filetype plugins
+set nocompatible
+filetype plugin on
+syntax on
+
+" Hide the intro message
+set shortmess+=I
+
+" Force help documents into new tabs
+cnoreabbrev <expr> help getcmdtype() == ":" && getcmdline() == 'help' ? 'tab help' : 'help'
+cnoreabbrev <expr> h getcmdtype() == ":" && getcmdline() == 'h' ? 'tab help' : 'h'
+
+" Configure netrw
+let g:netrw_liststyle = 3
+let g:netrw_banner = 0
+let g:netrw_browse_split = 3
+
+" Enable spell checking
+if !exists('g:vscode')
+ set spell
+ set spelllang=en_ca,en_us
+ set spelloptions=camel
+
+ " Hide spellcheck highlights I don't care about
+ hi clear SpellCap
+ hi clear SpellRare
+
+ " Change the spellcheck highlight to an underline
+ hi clear SpellBad
+ hi SpellBad cterm=underline ctermfg=DarkRed
+
+ " Disable spellcheck for some file formats
+ autocmd FileType man setlocal nospell
+ autocmd FileType diff setlocal nospell
+ autocmd FileType usda setlocal nospell
+endif
+
+" Configure Diff rendering
+hi DiffText ctermfg=White ctermbg=none
+hi DiffFile ctermfg=White ctermbg=none cterm=bold
+hi DiffIndexLine ctermfg=White ctermbg=none cterm=bold
+hi DiffAdd ctermfg=DarkGreen ctermbg=none
+hi DiffChange ctermfg=DarkRed ctermbg=none
+hi DiffDelete ctermfg=DarkRed ctermbg=none
diff --git a/configs/windows-terminal/settings.json b/configs/windows-terminal/settings.json
new file mode 100644
index 0000000..17841ce
--- /dev/null
+++ b/configs/windows-terminal/settings.json
@@ -0,0 +1,80 @@
+{
+ "$help": "https://aka.ms/terminal-documentation",
+ "$schema": "https://aka.ms/terminal-profiles-schema",
+ "actions": [
+ {
+ "command": {
+ "action": "copy",
+ "singleLine": false
+ },
+ "keys": "ctrl+shift+c"
+ },
+ {
+ "command": "paste",
+ "keys": "ctrl+shift+v"
+ }
+ ],
+ "copyFormatting": "none",
+ "copyOnSelect": false,
+ "defaultProfile": "{2ece5bfe-50ed-5f3a-ab87-5cd4baafed2b}",
+ "newTabMenu": [
+ {
+ "type": "remainingProfiles"
+ }
+ ],
+ "profiles": {
+ "defaults": {
+ "useAtlasEngine": true
+ },
+ "list": [
+ {
+ "commandline": "%SystemRoot%\\System32\\WindowsPowerShell\\v1.0\\powershell.exe",
+ "guid": "{61c54bbd-c2c6-5271-96e7-009a87ff44bf}",
+ "font": {
+ "face": "Consolas"
+ },
+ "hidden": false,
+ "name": "Windows PowerShell"
+ },
+ {
+ "commandline": "%SystemRoot%\\System32\\cmd.exe",
+ "guid": "{0caa0dad-35be-5f56-a8ff-afceeeaa6101}",
+ "font": {
+ "face": "Consolas"
+ },
+ "hidden": false,
+ "name": "Command Prompt"
+ },
+ {
+ "guid": "{2ece5bfe-50ed-5f3a-ab87-5cd4baafed2b}",
+ "commandline": "C:/Program Files/Git/bin/bash.exe --login",
+ "font": {
+ "face": "Consolas"
+ },
+ "hidden": false,
+ "name": "Git Bash",
+ "source": "Git"
+ },
+ {
+ "guid": "{2c4de342-38b7-51cf-b940-2309a097f518}",
+ "font": {
+ "face": "Consolas"
+ },
+ "hidden": true,
+ "name": "Ubuntu",
+ "source": "Windows.Terminal.Wsl"
+ },
+ {
+ "guid": "{51855cb2-8cce-5362-8f54-464b92b32386}",
+ "font": {
+ "face": "Consolas"
+ },
+ "hidden": false,
+ "name": "Ubuntu",
+ "source": "CanonicalGroupLimited.Ubuntu_79rhkp1fndgsc"
+ }
+ ]
+ },
+ "schemes": [],
+ "themes": []
+}
\ No newline at end of file
diff --git a/install-linux.sh b/install-linux.sh
new file mode 100644
index 0000000..f175a2e
--- /dev/null
+++ b/install-linux.sh
@@ -0,0 +1,154 @@
+#! /bin/sh
+set -e
+export EWCONFIG_ROOT=$(dirname $(readlink -f $0))
+
+# -- Ensure that deps exist --
+
+# Pull git submodules if needed
+if command -v git > /dev/null 2>&1; then
+ # If we have permission to run git
+ if [ -d "$EWCONFIG_ROOT/.git" ]; then
+ echo "Syncing git submodules..."
+ git submodule update --init --recursive
+
+ # Make sure that the `ewp` upstream exists
+ if ! git remote | grep -q ewp; then
+ echo "Adding secondary git remote"
+ git remote add ewp git://git.ewpratten.com/ewconfig
+ git remote set-url --push ewp ssh://ewpratten@git.ewpratten.com:/srv/git/ewconfig
+ echo "Remotes are:"
+ git remote -v
+ fi
+ fi
+fi
+
+# Make sure scripts are all executable
+chmod +x $EWCONFIG_ROOT/scripts/*
+chmod +x $EWCONFIG_ROOT/configs/nautilus/scripts/*
+
+# -- Directory Setup --
+set -x
+
+# Ensure that needed directories exist
+mkdir -p ~/Downloads # For downloads
+mkdir -p ~/bin # Personal bin dir. Reduces the risk of breaking ~/.local/bin
+mkdir -p ~/projects # For my projects
+mkdir -p ~/src # For compiling other people's projects
+mkdir -p ~/services # Service dir for servers mostly
+
+# Build the directory structure in ~/.config
+mkdir -p ~/.config/nvim
+mkdir -p ~/.config/termux
+mkdir -p ~/.config/logid
+mkdir -p ~/.config/systemd/user
+mkdir -p ~/.config/git
+mkdir -p ~/.config/git/config-fragments
+mkdir -p ~/.config/user-tmpfiles.d
+mkdir -p ~/.cargo
+mkdir -p ~/.ssh
+
+# Build directory structure for program plugins
+mkdir -p ~/.local/share/nautilus/scripts/
+
+# -- Config Linking --
+
+# Configure the shell
+ln -sf $EWCONFIG_ROOT/configs/shells/zsh/.zshrc ~/.zshrc
+ln -sf $EWCONFIG_ROOT/configs/shells/bash/.bashrc ~/.bashrc
+
+# Configure Git
+ln -sf $EWCONFIG_ROOT/configs/git/.gitconfig ~/.gitconfig
+ln -sf $EWCONFIG_ROOT/configs/ssh/allowed_signers ~/.ssh/allowed_signers
+ln -sf $EWCONFIG_ROOT/configs/git/.mailmap ~/.config/git/.mailmap
+
+# Copy the global mailmap file once
+if [ ! -f ~/.config/git/config-fragments/global-mailmap.gitconfig ]; then
+ cp $EWCONFIG_ROOT/configs/git/config-fragments/global-mailmap.gitconfig ~/.config/git/config-fragments/global-mailmap.gitconfig
+fi
+
+# Check if GIT is installed > 2.34
+set +x
+if command -v git > /dev/null 2>&1; then
+ # If sort has a -V option
+ if man sort | grep -q -- -V; then
+ # If GIT has SSH signing support, enable it
+ git_version=$(git --version | cut -d' ' -f3 | cut -d'.' -f1-2)
+ minimum_version=2.34
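+ # sort -V orders versions lowest-first; if the minimum version ends up on the
+ # first line, the installed git is at least 2.34 and signing can be enabled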
+ if [ "$(printf '%s\n' "$minimum_version" "$git_version" | sort -V | head -n1)" = "$minimum_version" ]; then
+ set -x
+ ln -sf $EWCONFIG_ROOT/configs/git/config-fragments/enable-signing.gitconfig ~/.config/git/config-fragments/enable-signing.gitconfig
+ fi
+ fi
+fi
+
+# Configure SSH
+ln -sf $EWCONFIG_ROOT/configs/ssh/config ~/.ssh/config
+chmod 644 "$HOME/.ssh/config"
+if command -v chown > /dev/null 2>&1; then chown "$USER:$USER" "$HOME/.ssh/config"; fi
+
+# Configure (neo)Vim
+ln -sf $EWCONFIG_ROOT/configs/vim/.vimrc ~/.vimrc
+ln -sf $EWCONFIG_ROOT/configs/nvim/init.vim ~/.config/nvim/init.vim
+ln -snf $EWCONFIG_ROOT/configs/nvim/pack ~/.config/nvim/pack
+ln -snf $EWCONFIG_ROOT/configs/nvim/third_party ~/.config/nvim/third_party
+
+# Nautilus right-click scripts
+ln -sf $EWCONFIG_ROOT/configs/nautilus/scripts/* ~/.local/share/nautilus/scripts/
+
+# Tabset configs
+ln -nsf $EWCONFIG_ROOT/configs/tabset ~/.config/tabset
+
+# Rofi configs
+ln -nsf $EWCONFIG_ROOT/configs/rofi ~/.config/rofi
+
+# Cargo
+ln -sf $EWCONFIG_ROOT/configs/cargo/config.toml ~/.cargo/config.toml
+
+# Termux
+ln -sf $EWCONFIG_ROOT/configs/termux/termux.properties ~/.config/termux/termux.properties
+
+# Set up user-tempfiles configs
+ln -sf $EWCONFIG_ROOT/configs/user-tmpfiles.d/* ~/.config/user-tmpfiles.d/
+
+# Logid config
+ln -sf $EWCONFIG_ROOT/configs/logid/logid.cfg ~/.config/logid/logid.cfg
+
+# Minecraft global configs
+ln -nsf $EWCONFIG_ROOT/configs/minecraft ~/.config/minecraft
+if [ -d ~/.var/app/org.prismlauncher.PrismLauncher ]; then
+ flatpak override --user --filesystem=~/.config/minecraft org.prismlauncher.PrismLauncher
+fi
+
+# Memegen
+ln -nsf $EWCONFIG_ROOT/configs/memegen ~/.config/memegen
+
+# Tmux
+ln -sf $EWCONFIG_ROOT/configs/tmux/.tmux.conf ~/.tmux.conf
+
+# -- Optional Configs --
+set +x
+
+# If ~/.config/git/config-fragments/personal-info.gitconfig does not exist
+if [ ! -f ~/.config/git/config-fragments/personal-info.gitconfig ]; then
+ # Ask if the user wants to install personal GIT config
+ echo -n "Do you want to install the personal GIT config? (y/n) "
+ read -r install_git_config
+ if [ "$install_git_config" = "y" ]; then
+ ln -sf $EWCONFIG_ROOT/configs/git/config-fragments/personal-info.gitconfig ~/.config/git/config-fragments/personal-info.gitconfig
+ fi
+fi
+
+# Link houdini scripts for appropriate versions
+if [ -d ~/houdini19.5 ]; then mkdir -p ~/houdini19.5/scripts; ln -sf $EWCONFIG_ROOT/configs/houdini19.5/scripts/* ~/houdini19.5/scripts; fi
+
+# Link blender scripts for appropriate versions
+if [ -d ~/.config/blender/3.6 ]; then ln -sf $EWCONFIG_ROOT/configs/blender/3.x/scripts/addons/* ~/.config/blender/3.6/scripts/addons/; fi
+
+# -- Finalization --
+
+# On systems that need it, configure Gnome
+sh $EWCONFIG_ROOT/configs/gnome/gnome-terminal-settings.sh || true
+sh $EWCONFIG_ROOT/configs/gnome/desktop-settings.sh || true
+
+# Attempt to force a termux settings reload on Android devices
+termux-reload-settings || true
diff --git a/install-windows.sh b/install-windows.sh
new file mode 100644
index 0000000..a835325
--- /dev/null
+++ b/install-windows.sh
@@ -0,0 +1,63 @@
+#! /bin/sh
+set -e
+export EWCONFIG_ROOT=$(dirname $(readlink -f $0))
+
+# Pull git submodules if needed
+echo "Syncing git submodules..."
+git submodule update --init --recursive
+
+# Make sure that the `ewp` upstream exists
+if ! git remote | grep -q ewp; then
+ echo "Adding secondary git remote"
+ git remote add ewp git://git.ewpratten.com/ewconfig
+ git remote set-url --push ewp ssh://ewpratten@git.ewpratten.com:/srv/git/ewconfig
+ echo "Remotes are:"
+ git remote -v
+fi
+
+# Make sure scripts are all executable
+chmod +x $EWCONFIG_ROOT/scripts/*
+chmod +x $EWCONFIG_ROOT/configs/nautilus/scripts/*
+
+# -- Directory Setup --
+set -x
+
+# Ensure that needed directories exist
+mkdir -p ~/bin # Personal bin dir. Reduces the risk of breaking ~/.local/bin
+mkdir -p ~/projects # For my projects
+
+# Build the directory structure in ~/.config
+mkdir -p ~/.config/git
+mkdir -p ~/.config/git/config-fragments
+mkdir -p ~/.cargo
+mkdir -p ~/.ssh
+
+# -- Config Linking --
+
+# Configure the shell
+ln -sf $EWCONFIG_ROOT/configs/shells/zsh/.zshrc ~/.zshrc
+ln -sf $EWCONFIG_ROOT/configs/shells/bash/.bashrc ~/.bashrc
+mkdir -p $LOCALAPPDATA/Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState
+ln -sf $EWCONFIG_ROOT/configs/windows-terminal/settings.json $LOCALAPPDATA/Packages/Microsoft.WindowsTerminal_8wekyb3d8bbwe/LocalState/settings.json
+
+# Configure Git
+ln -sf $EWCONFIG_ROOT/configs/git/.gitconfig ~/.gitconfig
+ln -sf $EWCONFIG_ROOT/configs/ssh/allowed_signers ~/.ssh/allowed_signers || true
+ln -sf $EWCONFIG_ROOT/configs/git/.mailmap ~/.config/git/.mailmap
+
+# Configure Vim
+ln -sf $EWCONFIG_ROOT/configs/vim/.vimrc ~/.vimrc
+
+# Remove Microsoft's fake python executables
+rm $LOCALAPPDATA/Microsoft/WindowsApps/python.exe || true
+rm $LOCALAPPDATA/Microsoft/WindowsApps/python3.exe || true
+
+# Copy the global mailmap file once
+if [ ! -f ~/.config/git/config-fragments/global-mailmap.gitconfig ]; then
+ cp $EWCONFIG_ROOT/configs/git/config-fragments/global-mailmap.gitconfig ~/.config/git/config-fragments/global-mailmap.gitconfig
+fi
+
+# Configure SSH
+ln -sf $EWCONFIG_ROOT/configs/ssh/config ~/.ssh/config
+chmod 644 "$HOME/.ssh/config"
+chown "$USER:$USER" "$HOME/.ssh/config"
\ No newline at end of file
diff --git a/keyboards/qmk/keymaps/ferris/combos.c b/keyboards/qmk/keymaps/ferris/combos.c
new file mode 100644
index 0000000..79836ec
--- /dev/null
+++ b/keyboards/qmk/keymaps/ferris/combos.c
@@ -0,0 +1,87 @@
+#include QMK_KEYBOARD_H
+
+
+#define MAKE_COMBO_INPUTS(name, inputs...) const uint16_t PROGMEM combo_inputs_##name[] = {inputs, COMBO_END};
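+// For example, MAKE_COMBO_INPUTS(FOO, KC_A, KC_B) expands to:
+//   const uint16_t PROGMEM combo_inputs_FOO[] = {KC_A, KC_B, COMBO_END};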
+
+
+// Combo tokens. These are used to identify combos for later processing if needed.
+enum combos {
+ // Combo that uses both pinky fingers to produce an Enter keypress
+ COMBO_PINKY_ENTER,
+ // Combo that uses both ring fingers to produce a keypress
+ // COMBO_RING_BACKSPC,
+ // Combo that uses the ring fingers to send a semicolon
+ COMBO_RING_SCLN,
+ // Combo that uses the pinky fingers to send escape
+ COMBO_PINKY_ESC,
+
+ // ASETNIOP
+ ASETNIOP_AE_Q,
+ ASETNIOP_RA_W,
+ ASETNIOP_RE_Z,
+ ASETNIOP_SA_X,
+ ASETNIOP_SR_F,
+ ASETNIOP_TA_P,
+ ASETNIOP_TR_C,
+ ASETNIOP_TS_D,
+ ASETNIOP_TN_B,
+ ASETNIOP_TE_V,
+ ASETNIOP_TI_G,
+ ASETNIOP_NA_J,
+ ASETNIOP_NR_K,
+ ASETNIOP_NS_M,
+ ASETNIOP_NE_H,
+ ASETNIOP_NO_L,
+ ASETNIOP_EI_U,
+ ASETNIOP_IN_Y,
+};
+
+// Define all the input combinations needed for the combos
+MAKE_COMBO_INPUTS(COMBO_PINKY_ENTER, KC_A, KC_O)
+MAKE_COMBO_INPUTS(COMBO_RING_SCLN, KC_R, KC_I)
+MAKE_COMBO_INPUTS(COMBO_PINKY_ESC, KC_Q, KC_BSPC)
+// MAKE_COMBO_INPUTS(COMBO_RING_BACKSPC, KC_R, KC_I)
+MAKE_COMBO_INPUTS(ASETNIOP_AE_Q, KC_A, KC_E)
+MAKE_COMBO_INPUTS(ASETNIOP_RA_W, KC_R, KC_A)
+MAKE_COMBO_INPUTS(ASETNIOP_RE_Z, KC_R, KC_E)
+MAKE_COMBO_INPUTS(ASETNIOP_SA_X, KC_S, KC_A)
+MAKE_COMBO_INPUTS(ASETNIOP_SR_F, KC_S, KC_R)
+MAKE_COMBO_INPUTS(ASETNIOP_TA_P, KC_T, KC_A)
+MAKE_COMBO_INPUTS(ASETNIOP_TR_C, KC_T, KC_R)
+MAKE_COMBO_INPUTS(ASETNIOP_TS_D, KC_T, KC_S)
+MAKE_COMBO_INPUTS(ASETNIOP_TN_B, KC_T, KC_N)
+MAKE_COMBO_INPUTS(ASETNIOP_TE_V, KC_T, KC_E)
+MAKE_COMBO_INPUTS(ASETNIOP_TI_G, KC_T, KC_I)
+MAKE_COMBO_INPUTS(ASETNIOP_NA_J, KC_N, KC_A)
+MAKE_COMBO_INPUTS(ASETNIOP_NR_K, KC_N, KC_R)
+MAKE_COMBO_INPUTS(ASETNIOP_NS_M, KC_N, KC_S)
+MAKE_COMBO_INPUTS(ASETNIOP_NE_H, KC_N, KC_E)
+MAKE_COMBO_INPUTS(ASETNIOP_NO_L, KC_N, KC_O)
+MAKE_COMBO_INPUTS(ASETNIOP_EI_U, KC_E, KC_I)
+MAKE_COMBO_INPUTS(ASETNIOP_IN_Y, KC_I, KC_N)
+
+// Map everything together
+combo_t key_combos[COMBO_COUNT] = {
+ [COMBO_PINKY_ENTER] = COMBO(combo_inputs_COMBO_PINKY_ENTER, KC_ENT),
+ [COMBO_RING_SCLN] = COMBO(combo_inputs_COMBO_RING_SCLN, KC_SCLN),
+ [COMBO_PINKY_ESC] = COMBO(combo_inputs_COMBO_PINKY_ESC, KC_ESC),
+ // [COMBO_RING_BACKSPC] = COMBO(combo_inputs_COMBO_RING_BACKSPC, KC_BSPC),
+ [ASETNIOP_AE_Q] = COMBO(combo_inputs_ASETNIOP_AE_Q, KC_Q),
+ [ASETNIOP_RA_W] = COMBO(combo_inputs_ASETNIOP_RA_W, KC_W),
+ [ASETNIOP_RE_Z] = COMBO(combo_inputs_ASETNIOP_RE_Z, KC_Z),
+ [ASETNIOP_SA_X] = COMBO(combo_inputs_ASETNIOP_SA_X, KC_X),
+ [ASETNIOP_SR_F] = COMBO(combo_inputs_ASETNIOP_SR_F, KC_F),
+ [ASETNIOP_TA_P] = COMBO(combo_inputs_ASETNIOP_TA_P, KC_P),
+ [ASETNIOP_TR_C] = COMBO(combo_inputs_ASETNIOP_TR_C, KC_C),
+ [ASETNIOP_TS_D] = COMBO(combo_inputs_ASETNIOP_TS_D, KC_D),
+ [ASETNIOP_TN_B] = COMBO(combo_inputs_ASETNIOP_TN_B, KC_B),
+ [ASETNIOP_TE_V] = COMBO(combo_inputs_ASETNIOP_TE_V, KC_V),
+ [ASETNIOP_TI_G] = COMBO(combo_inputs_ASETNIOP_TI_G, KC_G),
+ [ASETNIOP_NA_J] = COMBO(combo_inputs_ASETNIOP_NA_J, KC_J),
+ [ASETNIOP_NR_K] = COMBO(combo_inputs_ASETNIOP_NR_K, KC_K),
+ [ASETNIOP_NS_M] = COMBO(combo_inputs_ASETNIOP_NS_M, KC_M),
+ [ASETNIOP_NE_H] = COMBO(combo_inputs_ASETNIOP_NE_H, KC_H),
+ [ASETNIOP_NO_L] = COMBO(combo_inputs_ASETNIOP_NO_L, KC_L),
+ [ASETNIOP_EI_U] = COMBO(combo_inputs_ASETNIOP_EI_U, KC_U),
+ [ASETNIOP_IN_Y] = COMBO(combo_inputs_ASETNIOP_IN_Y, KC_Y),
+};
\ No newline at end of file
diff --git a/keyboards/qmk/keymaps/ferris/config.h b/keyboards/qmk/keymaps/ferris/config.h
new file mode 100644
index 0000000..389901d
--- /dev/null
+++ b/keyboards/qmk/keymaps/ferris/config.h
@@ -0,0 +1,28 @@
+#pragma once
+
+// Tapdance settings
+#define TAPPING_TERM 50
+#define TAPPING_TERM_PER_KEY
+#define RETRO_TAPPING // https://docs.qmk.fm/#/tap_hold?id=retro-tapping
+
+// Space cadet on control key
+#define LCPO_KEYS KC_LCTL, KC_LSFT, KC_0
+
+// // Force constant-speed controls for mouse movement
+// #define MK_3_SPEED
+// #define MK_MOMENTARY_ACCEL
+
+// // Override the mode-2 speed
+// #define MK_C_OFFSET_1 4 // Defaut: 4
+
+// Bootloader settings
+#define BOOTMAGIC_LITE_ROW 0
+#define BOOTMAGIC_LITE_COLUMN 0
+
+// Chording config
+#define FORCE_NKRO
+// #define COMBO_COUNT 3
+#define COMBO_COUNT 21
+
+// Settings for enabling experiments
+#define ENABLE_ASETNIOP
\ No newline at end of file
diff --git a/keyboards/qmk/keymaps/ferris/keymap.c b/keyboards/qmk/keymaps/ferris/keymap.c
new file mode 100644
index 0000000..28768f6
--- /dev/null
+++ b/keyboards/qmk/keymaps/ferris/keymap.c
@@ -0,0 +1,99 @@
+#include QMK_KEYBOARD_H
+
+// Combo magic
+#include "combos.c"
+
+// Layer definitions
+enum ferris_layers {
+ // _HOME,
+ // _QWERTY,
+ _MODMAK,
+ // _NUMERIC,
+ // _UTILITY,
+ // _MACROS,
+ // _RAINBOW,
+};
+
+// Shorthands
+#define LD_TERM LGUI(KC_ENT)
+#define CC_QUIT LGUI(LSFT(KC_Q))
+#define CC_COMM LCTL(KC_SLSH)
+#define CC_FMT LCTL(LSFT(KC_I))
+
+// clang-format off
+const uint16_t PROGMEM keymaps[][MATRIX_ROWS][MATRIX_COLS] = {
+
+ // [_HOME] = LAYOUT(
+ // KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_BSPC,
+ // KC_A, KC_R, KC_S, KC_T, KC_D, KC_H, KC_N, KC_E, KC_I, KC_O,
+ // KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO,
+ // KC_LSFT, KC_LCTL, KC_SPACE, KC_NO
+ // ),
+
+ // [_QWERTY] = LAYOUT(
+ // KC_Q, KC_W, KC_E, KC_R, KC_T, KC_Y, KC_U, KC_I, KC_O, KC_P,
+ // KC_A, KC_S, KC_D, KC_F, KC_G, KC_H, KC_J, KC_K, KC_L, KC_SCLN,
+ // KC_Z, KC_X, KC_C, KC_V, KC_B, KC_N, KC_M, KC_COMM, KC_DOT, KC_NO,
+ // KC_LCTL, KC_SPACE, KC_SPACE, KC_LSFT
+ // )
+
+ [_MODMAK] = LAYOUT(
+ KC_Q, KC_W, KC_F, KC_P, KC_G, KC_J, KC_L, KC_U, KC_Y, KC_BSPC,
+ KC_A, KC_R, KC_S, KC_T, KC_D, KC_H, KC_N, KC_E, KC_I, KC_O,
+ KC_LCTL, KC_Z, KC_X, KC_C, KC_V, KC_B, KC_M, KC_K, KC_COMM, KC_DOT,
+ KC_LSFT, KC_LCTL, KC_SPACE, KC_SPACE
+ // KC_LSFT, MO(_MACROS), LT(_UTILITY, KC_SPACE), LT(_NUMERIC, KC_SPACE)
+ ),
+
+ // [_NUMERIC] = LAYOUT(
+ // KC_F1, KC_F2, KC_F3, KC_F4, KC_F5, /**/ KC_F6, KC_F7, KC_F8, KC_F9, KC_F10,
+ // KC_1, KC_2, KC_3, KC_4, KC_5, /**/ KC_6, KC_7, KC_8, KC_9, KC_0,
+ // KC_LALT, KC_NO, KC_NO, KC_NO, TO(_MODMAK), /**/ KC_LGUI, KC_EQL, KC_MINS, KC_F11, KC_F12,
+ // KC_LSFT, KC_LCTL, /**/ KC_NO, KC_NO
+ // ),
+
+ // [_UTILITY] = LAYOUT(
+ // KC_Q, KC_VOLD, KC_VOLU, KC_NO, KC_PSCR, /**/ LD_TERM, KC_HOME, KC_PGUP, KC_PGDN, KC_DEL,
+ // KC_MPRV, KC_MPLY, KC_MNXT, KC_NO, KC_TAB, /**/ KC_LEFT, KC_DOWN, KC_UP, KC_RIGHT, KC_END,
+ // KC_NO, KC_NO, KC_NO, CC_FMT, CC_COMM, /**/ KC_QUOTE, KC_SLSH, KC_LBRC, KC_RBRC, KC_BSLS,
+ // KC_LSFT, KC_LCTL, /**/ KC_NO, KC_LGUI
+ // ),
+
+ // [_MACROS] = LAYOUT(
+ // KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, TO(_RAINBOW),
+ // KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO,
+ // KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO,
+ // KC_NO, KC_NO, KC_NO, KC_NO
+ // ),
+
+ // [_RAINBOW] = LAYOUT(
+ // KC_ESC, KC_Q, KC_W, KC_E, KC_5, TO(_MODMAK), KC_NO, KC_NO, KC_NO, KC_NO,
+ // KC_LSFT, KC_A, KC_S, KC_D, KC_G, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO,
+ // KC_LCTL, KC_Z, KC_X, KC_C, KC_V, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO,
+ // KC_SPACE, KC_X, KC_NO, KC_NO
+ // )
+};
+// clang-format on
+
+/** THIS IS FOR CREATING A NEW KEYMAP **
+ [_UNSET_] = LAYOUT(
+ KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO,
+ KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO,
+ KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO, KC_NO,
+ KC_NO, KC_NO, KC_NO, KC_NO
+ )
+*/
+
+// Overrides for the tapping terms.
+uint16_t get_tapping_term(uint16_t keycode, keyrecord_t *record) {
+ switch (keycode) {
+ // Space cadet needs to be much slower than my default
+ // case SC_LSPO:
+ // case SC_LCPO:
+ // return 200;
+ // case LT(_NUMERIC, KC_SPACE):
+ // return 1000;
+ default:
+ return TAPPING_TERM;
+ }
+}
diff --git a/keyboards/qmk/keymaps/ferris/rules.mk b/keyboards/qmk/keymaps/ferris/rules.mk
new file mode 100644
index 0000000..d02eca0
--- /dev/null
+++ b/keyboards/qmk/keymaps/ferris/rules.mk
@@ -0,0 +1,9 @@
+# Override the default bootloader since I am using a non-standard MCU
+BOOTLOADER = qmk-dfu
+
+# Enable bootloader keys
+BOOTMAGIC_ENABLE = yes
+
+# Set up chording support
+FORCE_NKRO = yes
+COMBO_ENABLE = yes
\ No newline at end of file
diff --git a/keyboards/qmk/keymaps/tg4x/config.h b/keyboards/qmk/keymaps/tg4x/config.h
new file mode 100644
index 0000000..7540ffc
--- /dev/null
+++ b/keyboards/qmk/keymaps/tg4x/config.h
@@ -0,0 +1,26 @@
+#pragma once
+
+// Threshold for things considered a "tap"
+#define TAPPING_TERM 300
+
+// Use Linux unicode mode
+#define UNICODE_SELECTED_MODES UNICODE_MODE_LINUX
+
+// Custom USB HID info
+// #undef VENDOR_ID
+// #undef PRODUCT_ID
+// #undef DEVICE_VER
+// #undef MANUFACTURER
+// #undef PRODUCT
+// #define VENDOR_ID 0x6570 // "ep"
+// #define PRODUCT_ID 0x0458 // 4 "X"
+// #define DEVICE_VER 0x0021
+// #define MANUFACTURER ewpratten
+// #define PRODUCT tg4x
+
+// Lighting settings
+#define RGBLIGHT_DEFAULT_MODE RGBLIGHT_MODE_STATIC_LIGHT
+#define RGBLIGHT_DEFAULT_HUE 0
+#define RGBLIGHT_DEFAULT_SAT 255
+#define RGBLIGHT_DEFAULT_VAL 255
+#define RGBLIGHT_DEFAULT_ON true
diff --git a/keyboards/qmk/keymaps/tg4x/keymap.c b/keyboards/qmk/keymaps/tg4x/keymap.c
new file mode 100644
index 0000000..cedf9af
--- /dev/null
+++ b/keyboards/qmk/keymaps/tg4x/keymap.c
@@ -0,0 +1,47 @@
+// Pull in the QMK lib
+#include QMK_KEYBOARD_H
+
+/* Trickery to make VSCode happy */
+#include
+#define _____ KC_NO
+#define _PASS KC_TRNS
+
+/* Layer Definitions */
+// clang-format off
+enum tg4x_layers {
+ QWERTY,
+ NUMERIC,
+ ACTIONS,
+};
+// clang-format on
+
+/* Layers */
+// clang-format off
+const uint16_t PROGMEM keymaps[][MATRIX_ROWS][MATRIX_COLS] = {
+
+ // QWERTY
+ [QWERTY] = LAYOUT(
+ KC_ESC, KC_Q, KC_W, KC_E, KC_R, KC_T, KC_Y, KC_U, KC_I, KC_O, KC_P, KC_DEL, KC_BSPC,
+ KC_TAB, KC_A, KC_S, KC_D, KC_F, KC_G, KC_H, KC_J, KC_K, KC_L, KC_SCLN, KC_ENT,
+ KC_LSFT, KC_Z, KC_X, KC_C, KC_V, KC_B, KC_N, KC_M, KC_COMM, KC_DOT, KC_RSFT, MO(NUMERIC),
+ KC_LCTL, KC_LALT, KC_LGUI, KC_SPACE, KC_SPACE, MO(ACTIONS), _____, _____, _____
+ ),
+
+ // NUMERIC
+ [NUMERIC] = LAYOUT(
+ KC_GRV, KC_F1, KC_F2, KC_F3, KC_F4, KC_F5, KC_F6, KC_F7, KC_F8, KC_F9, KC_F10, KC_F11, KC_F12,
+ KC_1, KC_2, KC_3, KC_4, KC_5, KC_6, KC_7, KC_8, KC_9, KC_0, KC_MINS, KC_EQL,
+ _PASS, _____, _____, _____, _____, KC_QUOTE, KC_SLSH, KC_LBRC, KC_RBRC, KC_BSLS, _____, _____,
+ _PASS, _PASS, _PASS, KC_SPACE, KC_SPACE, _____, _____, _____, _____
+ ),
+
+ // ACTIONS
+ [ACTIONS] = LAYOUT(
+ _____, KC_VOLD, KC_VOLU, KC_MUTE, _____, _____, _____, KC_PGUP, _____, KC_PGDN, KC_PSCR, KC_SCRL, KC_PAUS,
+ KC_CAPS, KC_MPRV, KC_MPLY, KC_MNXT, _____, _____, KC_LEFT, KC_DOWN, KC_UP, KC_RIGHT, KC_INS, _____,
+ _PASS, RGB_TOG, _____, _____, _____, KC_HOME, KC_END, _____, _____, _____, _PASS, _____,
+ _PASS, _PASS, _PASS, KC_SPACE, UC(0x00A0), _____, _____, _____, _____
+ ),
+
+};
+// clang-format on
diff --git a/keyboards/qmk/keymaps/tg4x/rules.mk b/keyboards/qmk/keymaps/tg4x/rules.mk
new file mode 100644
index 0000000..08d85df
--- /dev/null
+++ b/keyboards/qmk/keymaps/tg4x/rules.mk
@@ -0,0 +1,12 @@
+# Override the default bootloader since I am using a non-standard MCU
+BOOTLOADER = qmk-dfu
+
+# Enable bootloader keys
+BOOTMAGIC_ENABLE = yes
+
+# Enable Unicode
+UNICODE_COMMON = yes
+UNICODE_ENABLE = yes
+
+# Enable RGB lighting
+RGBLIGHT_ENABLE = yes
diff --git a/python_modules/ewconfig/secret_manager.py b/python_modules/ewconfig/secret_manager.py
new file mode 100644
index 0000000..e76ae72
--- /dev/null
+++ b/python_modules/ewconfig/secret_manager.py
@@ -0,0 +1,24 @@
+import logging
+from pathlib import Path
+from typing import Optional
+
+SEMI_SECRET_BASE_PATH = Path("~/.config/ewconfig/secrets/semi-secret").expanduser()
+
+logger = logging.getLogger(__name__)
+
+def get_semi_secret_string(name: str, namespace: Optional[str] = None) -> str:
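+ """Read a named secret from SEMI_SECRET_BASE_PATH, optionally nested under a namespace subdirectory."""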
+ logger.debug(f"Attempting to load secret: {name} (ns: {namespace})")
+
+ # Construct file path
+ file = SEMI_SECRET_BASE_PATH
+ if namespace:
+ file = file / namespace
+ file = file / name
+
+ # Make sure it exists
+ if not file.exists():
+ raise FileNotFoundError(f"Could not load secret from: {file}")
+
+ # Read the value
+ with open(file, "r") as f:
+ return f.read().strip()
diff --git a/python_modules/ewconfig/trello/__init__.py b/python_modules/ewconfig/trello/__init__.py
new file mode 100644
index 0000000..f22a8a7
--- /dev/null
+++ b/python_modules/ewconfig/trello/__init__.py
@@ -0,0 +1,8 @@
+from ..secret_manager import get_semi_secret_string
+
+TRELLO_API_KEY = "fba640a85f15c91e93e6b3f88e59489c"
+"""Public api key to do things to personal Trello"""
+
+
+def get_trello_api_token() -> str:
+ return get_semi_secret_string("trello_api_token")
diff --git a/python_modules/ewconfig/trello/boards.py b/python_modules/ewconfig/trello/boards.py
new file mode 100644
index 0000000..1c991fd
--- /dev/null
+++ b/python_modules/ewconfig/trello/boards.py
@@ -0,0 +1,20 @@
+from dataclasses import dataclass
+from typing import Dict
+
+
+@dataclass
+class TrelloBoardInfo:
+ id: str
+ lists: Dict[str, str]
+ tags: Dict[str, str]
+
+
+PERSONAL_TASKS_BOARD = TrelloBoardInfo(
+ id="tw3Cn3L6",
+ lists={"To Do": "6348a3ce5208f505b61d29bf"},
+ tags={
+ "GURU": "64e03ac77d27032282436d28",
+ "Github: Issue": "64eb5d72fb694cd8f0ba7a8d",
+ "Github: Pull Request": "652d4b775f5c59a8e6308216",
+ },
+)
diff --git a/python_modules/ewconfig/trello/cards.py b/python_modules/ewconfig/trello/cards.py
new file mode 100644
index 0000000..d51e303
--- /dev/null
+++ b/python_modules/ewconfig/trello/cards.py
@@ -0,0 +1,80 @@
+import requests
+import logging
+from typing import Any, Dict, List, Optional
+
+logger = logging.getLogger(__name__)
+
+
+def get_all_trello_cards(
+ board_id: str, api_key: str, api_token: str
+) -> List[Dict[str, Any]]:
+ # Get a list of cards on the board
+ logger.debug(f"Getting all cards on board: {board_id}")
+ response = requests.get(
+ f"https://api.trello.com/1/boards/{board_id}/cards",
+ params={
+ "key": api_key,
+ "token": api_token,
+ },
+ )
+ response.raise_for_status()
+ cards = response.json()
+ logger.debug(f"Found {len(cards)} cards on board: {board_id}")
+ return cards
+
+
+def create_card(
+ list_id: str,
+ name: str,
+ api_key: str,
+ api_token: str,
+ description: Optional[str] = None,
+ label_ids: Optional[List[str]] = None,
+ position: str = "top",
+) -> str:
+ logger.debug(f"Creating card: {name}")
+
+ # Build out params
+ params = {
+ "idList": list_id,
+ "name": name,
+ "key": api_key,
+ "token": api_token,
+ "pos": position,
+ }
+ if description:
+ params["desc"] = description
+ if label_ids:
+ params["idLabels"] = ",".join(label_ids)
+
+ # Make a new card
+ response = requests.post(
+ "https://api.trello.com/1/cards",
+ params=params,
+ )
+ response.raise_for_status()
+
+ # Get the new card's id
+ card_id = response.json()["id"]
+
+ logger.debug(f"Created card: {card_id}")
+ return card_id
+
+
+def add_attachment(
+ card_id: str, api_key: str, api_token: str, url: Optional[str] = None
+) -> None:
+ logger.debug(f"Adding attachment to card: {card_id}")
+ params = {
+ "key": api_key,
+ "token": api_token,
+ }
+ if url:
+ params["url"] = url
+
+ response = requests.post(
+ f"https://api.trello.com/1/cards/{card_id}/attachments",
+ params=params,
+ )
+ response.raise_for_status()
+ logger.debug(f"Added attachment to card: {card_id}")
diff --git a/python_modules/ewpipe/common/dirs.py b/python_modules/ewpipe/common/dirs.py
new file mode 100644
index 0000000..5a4546b
--- /dev/null
+++ b/python_modules/ewpipe/common/dirs.py
@@ -0,0 +1,13 @@
+from pathlib import Path
+
+DCC_DATA_BASE_DIR = Path.home() / "Videos" / "DCC"
+"""The base directory for storing data across DCCs"""
+
+HOUDINI_BASE_DIR = DCC_DATA_BASE_DIR / "Houdini"
+"""The base directory for storing Houdini data"""
+
+HOUDINI_PROJECTS_DIR = HOUDINI_BASE_DIR / "Projects"
+"""The base directory for storing Houdini projects"""
+
+BLENDER_BASE_DIR = DCC_DATA_BASE_DIR / "Blender"
+"""The base directory for storing Blender data"""
diff --git a/python_modules/ewpipe/common/env.py b/python_modules/ewpipe/common/env.py
new file mode 100644
index 0000000..0d1368a
--- /dev/null
+++ b/python_modules/ewpipe/common/env.py
@@ -0,0 +1,31 @@
+import os
+from typing import Dict
+
+
+def diff_environments(env_1: Dict[str, str], env_2: Dict[str, str]) -> Dict[str, str]:
+ """Diff two environments.
+
+ Args:
+ env_1 (Dict[str,str]): First environment
+ env_2 (Dict[str,str]): Second environment
+
+ Returns:
+ Dict[str,str]: Difference between the two environments
+ """
+ return {
+ key: value
+ for key, value in env_1.items()
+ if key not in env_2 or env_2[key] != value
+ }
+
+
+def diff_from_current_env(new_env: Dict[str, str]) -> Dict[str, str]:
+ """Diff the current environment from the given environment.
+
+ Args:
+ new_env (Dict[str, str]): New environment
+
+ Returns:
+ Dict[str, str]: Difference between the current environment and the given environment
+ """
+ return diff_environments(os.environ, new_env) # type: ignore
diff --git a/python_modules/ewpipe/common/logging.py b/python_modules/ewpipe/common/logging.py
new file mode 100644
index 0000000..b45ad94
--- /dev/null
+++ b/python_modules/ewpipe/common/logging.py
@@ -0,0 +1,8 @@
+import logging
+
+
+def configure_logging(verbose: bool = False):
+ logging.basicConfig(
+ level=logging.DEBUG if verbose else logging.INFO,
+ format="%(levelname)s:\t%(message)s",
+ )
diff --git a/python_modules/ewpipe/common/utils/path.py b/python_modules/ewpipe/common/utils/path.py
new file mode 100644
index 0000000..0ba0a2a
--- /dev/null
+++ b/python_modules/ewpipe/common/utils/path.py
@@ -0,0 +1,11 @@
+from pathlib import Path
+
+
+def prepend_if_relative(prefix: Path, possibly_abs_path: Path) -> Path:
+
+ # If absolute, no prepend needed
+ if possibly_abs_path.is_absolute():
+ return possibly_abs_path
+
+ # Otherwise prepend
+ return prefix / possibly_abs_path
diff --git a/python_modules/ewpipe/houdini/editions.py b/python_modules/ewpipe/houdini/editions.py
new file mode 100644
index 0000000..ee96dc0
--- /dev/null
+++ b/python_modules/ewpipe/houdini/editions.py
@@ -0,0 +1,50 @@
+from typing import List
+from pathlib import Path
+
+HOU_EDITIONS = ["core", "fx", "indie", "apprentice"]
+"""All possible Houdini editions."""
+
+
+def get_binary_name_for_edition(edition: str) -> str:
+ """Get the appropriate binary name for the given Houdini edition.
+
+ Args:
+ edition (str): Houdini edition
+
+ Returns:
+ str: Binary name
+ """
+
+ if edition in ["core", "fx"]:
+ return f"houdini{edition}"
+ else:
+ return "houdini"
+
+
+def get_houdini_edition_args(edition: str) -> List[str]:
+ """Get the appropriate arguments to launch a given Houdini edition.
+
+ Args:
+ edition (str): Houdini edition
+
+ Returns:
+ List[str]: Arguments
+ """
+
+ if edition in ["indie", "apprentice"]:
+ return [f"-{edition}"]
+ else:
+ return []
+
+
+def noncomercialize_path(input_path: Path) -> Path:
+ # Figure out the non-commercial version of the path (e.g. ".hip" -> ".hipnc")
+ path_suffix = input_path.suffix
+ noncomercial_path = input_path.with_suffix(f"{path_suffix}nc")
+
+ # If the NC version exists, use it
+ if noncomercial_path.exists():
+ return noncomercial_path
+
+ # All other cases, use the input directly
+ return input_path
diff --git a/python_modules/ewpipe/houdini/environment.py b/python_modules/ewpipe/houdini/environment.py
new file mode 100644
index 0000000..4bceff9
--- /dev/null
+++ b/python_modules/ewpipe/houdini/environment.py
@@ -0,0 +1,37 @@
+from dataclasses import dataclass, field, fields
+from typing import Dict, Optional
+
+
+@dataclass
+class HoudiniEnvironment:
+ script_debug: bool = field(default=True, metadata={"key": "HOUDINI_SCRIPT_DEBUG"})
+ """If set, errors will be printed when loading dialog scripts and scripted operators."""
+
+ show_py_panel_errors_in_console: bool = field(
+ default=True, metadata={"key": "HOUDINI_CONSOLE_PYTHON_PANEL_ERROR"}
+ )
+ """Errors when starting python panels will also be sent to the console, instead of just displaying them within the panel."""
+
+ pdg_node_debug_level: int = field(
+ default=3, metadata={"key": "HOUDINI_PDG_NODE_DEBUG"}
+ )
+ """Determines if PDG should print out node status information during the cook.
+
+ 1: Enable a status print out message each time a node finishes cooking
+ 2: 1 + node error messages
+ 3: Print node generation/cook status, errors and node warnings
+ 4: 3 + print a message for each node callback invocation
+ """
+
+ splash_message: Optional[str] = field(
+ default=None, metadata={"key": "HOUDINI_SPLASH_MESSAGE"}
+ )
+ """Message shown on the splash screen"""
+
+ def to_dict(self) -> Dict[str, str]:
+ output: Dict[str, str] = {}
+ for obj_field in fields(self):
+ field_value = self.__dict__[obj_field.name]
+ if field_value:
+ output[obj_field.metadata["key"]] = str(field_value)
+ return output
diff --git a/python_modules/ewpipe/houdini/installations.py b/python_modules/ewpipe/houdini/installations.py
new file mode 100644
index 0000000..a6c009a
--- /dev/null
+++ b/python_modules/ewpipe/houdini/installations.py
@@ -0,0 +1,129 @@
+import logging
+import platform
+import argparse
+import sys
+from pathlib import Path
+from typing import Optional
+
+logger = logging.getLogger(__name__)
+
+
+def get_default_houdini_installation_base_path() -> Path:
+ """Get the default Houdini installation base path.
+
+ Returns:
+ Path: Default Houdini installation base path
+ """
+ if platform.system() == "Linux":
+ return Path("/opt")
+ elif platform.system() == "Windows":
+ return Path("C:/Program Files/Side Effects Software")
+ else:
+ raise RuntimeError(f"Unsupported platform: {platform.system()}")
+
+
+def find_latest_houdini_installation(base_path: Path) -> Optional[Path]:
+ """Find the latest Houdini installation in the given base path.
+
+ Args:
+ base_path (Path): Base path to look for Houdini installations in.
+
+ Returns:
+ Optional[Path]: Houdini installation path if found
+ """
+ logger.debug(f"Looking for the latest Houdini installation in: {base_path}")
+
+ # Look for possible houdini installations
+ if platform.system() == "Linux":
+ possible_installations = sorted(base_path.glob("hfs*"))
+ elif platform.system() == "Windows":
+ possible_installations = sorted(base_path.glob("Houdini *"))
+ else:
+ raise RuntimeError(f"Unsupported platform: {platform.system()}")
+ logger.debug(
+ f"Search found the following Houdini installations: {[str(i) for i in possible_installations]}"
+ )
+
+ # Remove `Houdini Server` if it exists
+ possible_installations = [
+ installation
+ for installation in possible_installations
+ if "Server" not in installation.name
+ ]
+
+ # If there are no installations, return None
+ if not possible_installations:
+ return None
+
+ # Otherwise, return the latest installation
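+ # Note: "latest" relies on lexicographic ordering of the directory names, which
+ # matches version order for typical installs (e.g. hfs19.5 < hfs20.0) but is
+ # not a true version sort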
+ latest_installation = possible_installations[-1]
+ logger.debug(f"Latest Houdini installation: {latest_installation}")
+ return latest_installation
+
+
+def get_houdini_installation_path(
+ version: Optional[str] = None,
+ base_path: Optional[Path] = None,
+ not_exists_ok: bool = False,
+) -> Optional[Path]:
+ """Get the path to the Houdini installation for the given version.
+
+ Args:
+ version (Optional[str], optional): Houdini version to target. Defaults to None.
+ base_path (Optional[Path], optional): Base path to search for installations in. Defaults to the platform default.
+ not_exists_ok (bool, optional): If true, allows bad paths to be returned. Defaults to False.
+
+ Raises:
+ RuntimeError: Thrown if the platform is not supported.
+
+ Returns:
+ Optional[Path]: Path to the Houdini installation if found
+ """
+
+ logger.debug(f"Finding Houdini installation for version: {version}")
+
+ # Get the default installation base path
+ if not base_path:
+ base_path = get_default_houdini_installation_base_path()
+ logger.debug(f"Searching for Houdini installations in: {base_path}")
+
+ # If we don't have a version, find the latest installation
+ if not version:
+ logger.debug("No version specified, finding latest installation")
+ return find_latest_houdini_installation(base_path)
+
+ # Otherwise, find the installation for the given version
+ if platform.system() == "Linux":
+ installation_path = base_path / f"hfs{version}"
+ elif platform.system() == "Windows":
+ installation_path = base_path / f"Houdini {version}"
+ else:
+ raise RuntimeError(f"Unsupported platform: {platform.system()}")
+
+ # If the installation path does not exist, return None
+ if (not installation_path.exists()) and not not_exists_ok:
+ logger.debug(f"Installation path does not exist: {installation_path}")
+ return None
+
+ # Otherwise, return the installation path
+ logger.debug(f"Found installation path: {installation_path}")
+ return installation_path
+
+
+if __name__ == "__main__":
+ ap = argparse.ArgumentParser()
+ ap.add_argument("--version", "-v", help="Houdini version", type=str)
+ ap.add_argument("--base-path", "-b", help="Houdini base path", type=str)
+ ap.add_argument("--not-exists-ok", help="Allow bad paths", action="store_true")
+ args = ap.parse_args()
+
+ result = get_houdini_installation_path(
+ version=args.version,
+ base_path=Path(args.base_path) if args.base_path else None,
+ not_exists_ok=args.not_exists_ok,
+ )
+ if not result:
+ print("Could not find Houdini", file=sys.stderr)
+ sys.exit(1)
+
+ print(result)
+ sys.exit(0)
diff --git a/scripts/ableton-linux b/scripts/ableton-linux
new file mode 100755
index 0000000..2feb4ea
--- /dev/null
+++ b/scripts/ableton-linux
@@ -0,0 +1,193 @@
+#! /usr/bin/env python3
+import argparse
+import os
+import sys
+import logging
+import subprocess
+import shutil
+import time
+import pypresence
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+WINEASIO_SRC_PATH = Path("~/src/wineasio").expanduser()
+DISCORD_CLIENT_ID = 1175091631913963610
+DISCORD_ICON = "ableton_grey"
+
+
+def build_wineasio():
+ # If the wineasio source directory doesn't exist, clone it
+ if not WINEASIO_SRC_PATH.is_dir():
+ logger.info("Cloning wineasio source")
+ subprocess.check_call(
+ [
+ "git",
+ "clone",
+ "https://github.com/wineasio/wineasio",
+ str(WINEASIO_SRC_PATH),
+ ]
+ )
+ subprocess.check_call(
+ ["git", "submodule", "update", "--init", "--recursive"],
+ cwd=str(WINEASIO_SRC_PATH),
+ )
+
+ # Make sure `pipewire-jack` is installed
+ logger.info("Installing pipewire-jack")
+
+ # Call make to build 64-bit libs
+ logger.info("Building wineasio")
+ try:
+ subprocess.check_call(["make", "64"], cwd=str(WINEASIO_SRC_PATH))
+ except subprocess.CalledProcessError:
+ logger.error("Failed to build wineasio")
+ logger.info(
+ "Make sure you have `pipewire-jack-audio-connection-kit-devel` installed"
+ )
+ logger.info("Make sure you have `wine-devel` installed")
+ sys.exit(1)
+
+ # We need to copy the libs for wine to find them
+ logger.info("Copying wineasio libs")
+ subprocess.check_call(
+ ["sudo", "cp", "build64/wineasio64.dll", "/usr/lib64/wine/x86_64-windows/"],
+ cwd=str(WINEASIO_SRC_PATH),
+ )
+ subprocess.check_call(
+ ["sudo", "cp", "build64/wineasio64.dll.so", "/usr/lib64/wine/x86_64-unix/"],
+ cwd=str(WINEASIO_SRC_PATH),
+ )
+
+
+def bottles_winepfx_from_name(bottle_name: str) -> Path:
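+ # Bottles keeps each prefix under ~/.local/share/bottles/bottles/<name>, with
+ # spaces replaced by dashes (e.g. "Ableton 11 Suite" -> "Ableton-11-Suite")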
+ return Path("~/.local/share/bottles/bottles/").expanduser() / (
+ bottle_name.replace(" ", "-")
+ )
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="ableton-linux", description="Executes Ableton on Linux"
+ )
+ ap.add_argument(
+ "--no-presence", "-n", help="Hide activity from Discord", action="store_true"
+ )
+ ap.add_argument(
+ "--bottle", "-b", help="Use the specified bottle", default="Ableton 11 Suite"
+ )
+ ap.add_argument(
+ "--program", "-p", help="Program to run", default="Ableton Live 11 Suite"
+ )
+ ap.add_argument(
+ "--dry-run", help="Don't actually launch Ableton", action="store_true"
+ )
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Ensure we have bottles
+ if not shutil.which("bottles-cli"):
+ logger.error("You can't do this without bottles installed")
+ return 1
+
+ # Configure discord presence
+ discord_presence = pypresence.Presence(DISCORD_CLIENT_ID)
+ if not args.no_presence:
+ discord_presence.connect()
+ launch_start = int(time.time())
+
+ # Ensure we have wineasio
+ if not (WINEASIO_SRC_PATH / "build64").exists():
+ if not args.no_presence:
+ discord_presence.update(
+ start=launch_start,
+ large_image=DISCORD_ICON,
+ details="Compiling WineASIO...",
+ )
+ build_wineasio()
+
+ # Figure out the wineprefix
+ wineprefix = bottles_winepfx_from_name(args.bottle)
+ logger.info(f"Wine prefix is: {wineprefix}")
+
+ # Ensure that the bottle has the wineasio dll
+ if not (wineprefix / ".wineasio-installed").is_file():
+ logger.info("Registering wineasio")
+ if not args.no_presence:
+ discord_presence.update(
+ start=launch_start,
+ large_image=DISCORD_ICON,
+ details="Registering WineASIO with Ableton...",
+ )
+ subprocess.check_call(
+ [WINEASIO_SRC_PATH / "wineasio-register"],
+ env={"WINEPREFIX": str(wineprefix)},
+ )
+ shutil.copy(
+ WINEASIO_SRC_PATH / "build64" / "wineasio64.dll.so",
+ wineprefix / "drive_c" / "windows" / "system" / "wineasio64.dll",
+ )
+ shutil.copy(
+ WINEASIO_SRC_PATH / "build64" / "wineasio64.dll.so",
+ wineprefix / "drive_c" / "windows" / "system32" / "wineasio64.dll",
+ )
+ (wineprefix / ".wineasio-installed").touch()
+
+ logger.info("Waiting 15 seconds to let wine do its thing")
+ time.sleep(15)
+
+ # Build a modified environment for ableton
+ ableton_env = os.environ.copy()
+ ableton_env.update(
+ {
+ "WINEASIO_NUMBER_INPUTS": "16",
+ "WINEASIO_NUMBER_OUTPUTS": "16",
+ "WINEASIO_CONNECT_TO_HARDWARE": "1",
+ "WINEASIO_PREFERRED_BUFFERSIZE": "2048",
+ "WINEASIO_FIXED_BUFFERSIZE": "1",
+ # "PIPEWIRE_LATENCY": "2048/48000", # Buffer size / sample rate
+ }
+ )
+
+ # Update the presence message
+ if not args.no_presence:
+ discord_presence.update(
+ start=launch_start,
+ large_image=DISCORD_ICON,
+ details="Working on a project",
+ buttons=[
+ {"label": "Check out my music!", "url": "https://ewpratten.com/music"}
+ ],
+ )
+
+ # Launch Ableton via bottles
+ if not args.dry_run:
+ logger.info("Launching Ableton")
+ return_code = subprocess.call(
+ ["bottles-cli", "run", "-b", args.bottle, "-p", args.program],
+ env=ableton_env,
+ )
+ if not args.no_presence:
+ discord_presence.close()
+ return return_code
+
+ else:
+ logger.info("Dry run, not launching Ableton")
+ logger.info("Press enter to continue")
+ input()
+ if not args.no_presence:
+ discord_presence.close()
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/aprs-passcode b/scripts/aprs-passcode
new file mode 100755
index 0000000..a19f1dc
--- /dev/null
+++ b/scripts/aprs-passcode
@@ -0,0 +1,28 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="aprs-passcode",
+ description="Calculate the passcode used for APRS-IS authentication",
+ )
+ ap.add_argument("callsign", help="APRS callsign")
+ args = ap.parse_args()
+
+ # Perform passcode calculation
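+ # The APRS-IS passcode is a 15-bit hash of the base callsign (SSID stripped):
+ # starting from 0x73E2, each character is XORed into the high byte (even index)
+ # or low byte (odd index), and the result is masked to 15 bits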
+ callsign = args.callsign.upper().split("-")[0]
+ code = 0x73E2
+ for i, char in enumerate(callsign):
+ code ^= ord(char) << (8 if not i % 2 else 0)
+ passcode = code & 0x7FFF
+
+ print(passcode)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/aspath b/scripts/aspath
new file mode 100755
index 0000000..54d2803
--- /dev/null
+++ b/scripts/aspath
@@ -0,0 +1,26 @@
+#! /bin/bash
+set -e
+
+# Ensure we were passed a host argument
+if [ -z "$1" ]; then
+ echo "Usage: $0 "
+ exit 1
+fi
+
+# Parse out the ASNs
+asns=$( mtr -jzc1 -Z1 -G0.25 $1 | jq ".report.hubs[].ASN" | tr -d "\"" | uniq )
+
+# For each line
+for asn in $asns; do
+ # Skip "AS???" lines
+ if [[ $asn =~ ^AS\?\?\?$ ]]; then
+ echo "Private Hop"
+ continue
+ fi
+
+ # Get the AS name
+ as_name=$( whois -h whois.radb.net $asn | grep -i ^descr: | cut -d : -f 2- | grep -o -E '\S.*\S|\S' )
+
+ # Print the ASN and AS name
+ echo "$asn - $as_name"
+done
\ No newline at end of file
diff --git a/scripts/basejump b/scripts/basejump
new file mode 100755
index 0000000..43fd062
--- /dev/null
+++ b/scripts/basejump
@@ -0,0 +1,37 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import logging
+import subprocess
+
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog="basejump")
+ ap.add_argument(
+ "subcommand", help="The subcommand to run", choices=["init", "fetch", "discover"]
+ )
+ ap.add_argument("arguments", nargs=argparse.REMAINDER)
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Execute the appropriate subcommand
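+ # e.g. "basejump fetch foo" dispatches to "basejump-fetch foo" found on $PATH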
+ real_command_name = f"basejump-{args.subcommand}"
+ try:
+ return subprocess.run([real_command_name] + args.arguments).returncode
+ except FileNotFoundError:
+ logger.error(f"Unknown subcommand: {args.subcommand}")
+ logger.error(f"Could not find `{real_command_name}` in $PATH")
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/basejump-discover b/scripts/basejump-discover
new file mode 100755
index 0000000..e7cfb19
--- /dev/null
+++ b/scripts/basejump-discover
@@ -0,0 +1,59 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import logging
+import subprocess
+import json
+from pprint import pprint
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="basejump discover", description="Discover repos in a codebase"
+ )
+ ap.add_argument("root_path", help="The root path of the codebase", type=Path)
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Find all git repos in the codebase
+ logger.info(f"Searching for git repos in: {args.root_path}")
+ repos = []
+ for path in args.root_path.rglob(".git"):
+ repos.append({"path":str(path.parent.absolute())})
+
+ # For each repo, find the upstream
+ logger.info("Finding upstream URLs...")
+ for repo in repos:
+ # Get the upstream URL
+ upstream_url = subprocess.run(
+ ["git", "remote", "get-url", "origin"],
+ cwd=repo["path"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ encoding="utf-8",
+ ).stdout.strip()
+
+ # Add the upstream URL to the repo config
+ repo["upstream"] = upstream_url
+
+ # Print the results
+ logger.info("Found the following repos:")
+ print(json.dumps(repos, indent=4))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/basejump-fetch b/scripts/basejump-fetch
new file mode 100755
index 0000000..ce050b1
--- /dev/null
+++ b/scripts/basejump-fetch
@@ -0,0 +1,58 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import logging
+import json
+import subprocess
+import os
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="basejump fetch", description="Fetches all changes for a whole codebase"
+ )
+ ap.add_argument("name", help="The name of the codebase")
+ ap.add_argument("--pull", help="Perform a full pull", action="store_true")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Ensure that the basejump config dir exists
+ bj_config_dir = Path.home() / ".config" / "basejump"
+ bj_config_dir.mkdir(parents=True, exist_ok=True)
+
+ # Read the codebase config
+ codebase_config_path = bj_config_dir / f"{args.name}.codebase.json"
+ if not codebase_config_path.exists():
+ logger.error(f"Codebase `{args.name}` does not exist")
+ return 1
+ config = json.loads(codebase_config_path.read_text())
+
+ # Handle each repository
+ for repo in config["repos"]:
+ logger.info(f"Fetching {repo['path']}")
+
+ # If we are in pull mode, do a git pull
+ if args.pull:
+ subprocess.run(["git", "pull"], cwd=repo["path"])
+
+ # Otherwise fetch all
+ else:
+ subprocess.run(["git", "fetch", "--all"], cwd=repo["path"])
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/basejump-init b/scripts/basejump-init
new file mode 100755
index 0000000..0013ea7
--- /dev/null
+++ b/scripts/basejump-init
@@ -0,0 +1,75 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import logging
+import json
+import subprocess
+import os
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="basejump init", description="Creates a new basejump codebase"
+ )
+ ap.add_argument("name", help="The name of the codebase")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Ensure that the basejump config dir exists
+ bj_config_dir = Path.home() / ".config" / "basejump"
+ bj_config_dir.mkdir(parents=True, exist_ok=True)
+
+ # Create a new codebase definition
+ codebase_config_path = bj_config_dir / f"{args.name}.codebase.json"
+
+ # If the path already exists, abort
+ if codebase_config_path.exists():
+ logger.error(f"Codebase `{args.name}` already exists")
+ logger.info(f"Config file at: {codebase_config_path}")
+ return 1
+
+ # Create a template codebase config
+ template_config = {
+ "name": args.name,
+ "repos": [
+ {
+ "path": "/tmp/example",
+ "upstream": "https://github.com/octocat/Hello-World",
+ }
+ ],
+ }
+
+ # Write the template config to disk
+ codebase_config_path.write_text(json.dumps(template_config, indent=4))
+
+ # Open $EDITOR (or vim) to edit the config
+ subprocess.run([os.environ.get("EDITOR", "vim"), str(codebase_config_path)])
+
+ # Iterate through every repo and clone it
+ config = json.loads(codebase_config_path.read_text())
+ for repo in config["repos"]:
+ if Path(repo["path"]).exists():
+ logger.info(f"Skipping {repo['path']}, already exists")
+ continue
+
+ # Do a clone
+ logger.info(f"Cloning {repo['upstream']} into {repo['path']}")
+ subprocess.run(["git", "clone", repo["upstream"], repo["path"]])
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/blink-check b/scripts/blink-check
new file mode 100755
index 0000000..d43188e
--- /dev/null
+++ b/scripts/blink-check
@@ -0,0 +1,101 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import subprocess
+from datetime import datetime
+
+logger = logging.getLogger(__name__)
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog='blink-check', description='Check on a running blink-fetch cron task')
+ ap.add_argument("hostname", help="[user@]hostname[:port] for SSH")
+ ap.add_argument("--image-dir", help="Remote directory containing fetched images", default="/home/ewpratten/Pictures/blink")
+ ap.add_argument("--camera-id", help="Camera ID", default="155295")
+ ap.add_argument("--show-latest", "--show", "-s", help="Download and display the latest image (if possible)", action="store_true")
+ ap.add_argument('-v', '--verbose', help='Enable verbose logging', action='store_true')
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format='%(levelname)s: %(message)s',
+ )
+
+ # List files in image directory
+ logger.info(f"Listing files in: {args.hostname}:{args.image_dir}")
+ result = subprocess.run(
+ [
+ "ssh",
+ args.hostname,
+ "ls",
+ "-1",
+ args.image_dir,
+ ],
+ capture_output=True,
+ text=True,
+ )
+
+ # Check for errors
+ if result.returncode != 0:
+ logger.error(f"Failed to list files in: {args.hostname}:{args.image_dir}")
+ logger.error(result.stderr)
+ return 1
+
+ # Find all frames
+ frames = result.stdout.splitlines()
+ frames = [frame for frame in frames if frame.startswith(f"camera_{args.camera_id}.")]
+ frames = sorted(frames)
+ logger.info(f"Found {len(frames)} frames")
+ if len(frames) > 0:
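+ # Filenames look like camera_<id>.<YYYYmmdd_HHMMSS>.jpg, so the timestamp is the second dot-separated field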
+ oldest_frame_time = datetime.strptime(frames[0].split('.')[1], "%Y%m%d_%H%M%S")
+ newest_frame_time = datetime.strptime(frames[-1].split('.')[1], "%Y%m%d_%H%M%S")
+ logger.info(f"Oldest frame is from: {oldest_frame_time}")
+ logger.info(f"Newest frame is from: {newest_frame_time}")
+
+ # Download and display the latest image
+ if args.show_latest:
+ if len(frames) > 0:
+ latest_frame = frames[-1]
+ logger.info(f"Downloading latest frame: {latest_frame}")
+ result = subprocess.run(
+ [
+ "scp",
+ f"{args.hostname}:{args.image_dir}/{latest_frame}",
+ "/tmp/blink-latest.jpg",
+ ],
+ capture_output=True,
+ text=True,
+ )
+
+ # Check for errors
+ if result.returncode != 0:
+ logger.error(f"Failed to download latest frame: {latest_frame}")
+ logger.error(result.stderr)
+ return 1
+
+ # Display image
+ logger.info(f"Displaying latest frame: {latest_frame}")
+ result = subprocess.run(
+ [
+ "xdg-open",
+ "/tmp/blink-latest.jpg",
+ ],
+ capture_output=True,
+ text=True,
+ )
+
+ # Check for errors
+ if result.returncode != 0:
+ logger.error(f"Failed to display latest frame: {latest_frame}")
+ logger.error(result.stderr)
+ return 1
+ else:
+ logger.info("No frames to display")
+
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/blink-fetch b/scripts/blink-fetch
new file mode 100755
index 0000000..c0eaf79
--- /dev/null
+++ b/scripts/blink-fetch
@@ -0,0 +1,138 @@
+#! /usr/bin/env python3
+# Installation: pip install exif blinkpy Pillow
+import argparse
+import sys
+import logging
+import getpass
+import asyncio
+import exif
+from datetime import datetime
+from blinkpy.blinkpy import Blink
+from blinkpy.auth import Auth
+from blinkpy.helpers.util import json_load
+from pathlib import Path
+from PIL import Image, ImageDraw
+
+logger = logging.getLogger(__name__)
+
+
+def decdeg2dms(dd):
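+ # Convert decimal degrees to a (degrees, minutes, seconds) tuple, e.g. for EXIF GPS fields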
+ mult = -1 if dd < 0 else 1
+ mnt, sec = divmod(abs(dd) * 3600, 60)
+ deg, mnt = divmod(mnt, 60)
+ return mult * deg, mult * mnt, mult * sec
+
+
+async def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="blink-fetch", description="Fetch an image from a Blink camera"
+ )
+ ap.add_argument("--username", help="Blink username", required=True)
+ ap.add_argument("--password", help="Blink password")
+ ap.add_argument("--camera-id", help="Camera ID", default="155295")
+ ap.add_argument("--output-dir", help="Output directory", default="~/Pictures/blink")
+ ap.add_argument(
+ "--copy-latest", help="Copies the latest frame to this path", type=Path
+ )
+ ap.add_argument(
+ "--no-2fa", help="Don't try to get 2FA credentials", action="store_true"
+ )
+ ap.add_argument("--no-exif", help="Don't write EXIF data", action="store_true")
+ ap.add_argument("--exif-camera", help="Camera name", default="Blink Mini")
+ ap.add_argument(
+ "--exif-latitude", "--exif-lat", help="Camera latitude (Decimal Degrees)"
+ )
+ ap.add_argument(
+ "--exif-longitude", "--exif-lng", help="Camera longitude (Decimal Degrees)"
+ )
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Ask for the password if it wasn't provided
+ if args.password is None:
+ args.password = getpass.getpass(prompt="Blink Password: ")
+
+ # Authenticate with Blink servers
+ auth = Auth(
+ {"username": args.username, "password": args.password}, no_prompt=args.no_2fa
+ )
+ blink = Blink()
+ blink.auth = auth
+ await blink.start()
+
+ # Find the requested camera
+ for name, camera in blink.cameras.items():
+ logger.debug(f"Found camera: {name} ({camera.attributes['camera_id']})")
+ if camera.attributes["camera_id"] == args.camera_id:
+ logger.info("Found requested camera")
+ break
+ else:
+ logger.error("Could not find requested camera")
+ return 1
+
+ # Fetch the image
+ logger.info("Fetching image")
+ await camera.snap_picture()
+ await blink.refresh()
+
+ # Create the output directory if it doesn't exist
+ now = datetime.now()
+ out_file = (
+ Path(args.output_dir).expanduser()
+ / f"camera_{args.camera_id}.{now.strftime('%Y%m%d_%H%M%S')}.jpg"
+ )
+ out_file.parent.mkdir(parents=True, exist_ok=True)
+
+ logger.info(f"Writing image to: {out_file}")
+ await camera.image_to_file(str(out_file))
+
+ # Draw the timestamp on the image in the bottom left corner
+ image = Image.open(out_file)
+ draw = ImageDraw.Draw(image)
+ draw.text((0, image.height - 10), now.strftime("%Y-%m-%d %H:%M:%S"), fill=(255, 255, 255), stroke_width=2, stroke_fill=(0, 0, 0))
+ image.save(out_file)
+
+ # Handle EXIF data
+ if not args.no_exif:
+ logger.info("Re-reading image to inject EXIF data")
+ with open(out_file, "rb") as f:
+ image = exif.Image(f)
+
+ # Set the camera type
+ image.model = args.exif_camera
+
+ # If the user provided a latitude and longitude, set it
+ # if args.exif_latitude and args.exif_longitude:
+ # image.gps_latitude = decdeg2dms(float(args.exif_latitude))
+ # image.gps_longitude = decdeg2dms(float(args.exif_longitude))
+ # image.gps_latitude_ref = "N"
+ # image.gps_longitude_ref = "W"
+
+ # Set the timestamp
+ image.datetime_original = now.strftime(exif.DATETIME_STR_FORMAT)
+
+ # Write the EXIF data back to the file
+ logger.info("Writing EXIF data")
+ with open(out_file, "wb") as f:
+ f.write(image.get_file())
+
+ # If we were asked to copy the latest frame, do so
+ if args.copy_latest:
+ logger.info(f"Copying latest frame to: {args.copy_latest}")
+ args.copy_latest.parent.mkdir(parents=True, exist_ok=True)
+ args.copy_latest.write_bytes(out_file.read_bytes())
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(asyncio.run(main()))
diff --git a/scripts/blink-timelapse b/scripts/blink-timelapse
new file mode 100755
index 0000000..24f7c8b
--- /dev/null
+++ b/scripts/blink-timelapse
@@ -0,0 +1,85 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import subprocess
+from datetime import datetime
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="blink-timelapse",
+ description="Generates timelapses from blink image captures",
+ )
+ ap.add_argument("--camera-id", help="Camera ID", default="155295")
+ ap.add_argument("--image-dir", help="Image directory", default="~/Pictures/blink")
+ ap.add_argument(
+ "--output-dir", help="Output directory", default="~/Videos/BlinkTimelapse"
+ )
+ ap.add_argument(
+ "--delete-frames", help="Delete frames after processing", action="store_true"
+ )
+ ap.add_argument("--frame-rate", help="Frame rate", default="5")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Find all frames
+ image_dir = Path(args.image_dir).expanduser()
+ frames = image_dir.glob(f"camera_{args.camera_id}.*.jpg")
+ frames = sorted(frames, key=lambda frame: frame.stat().st_mtime)
+ logger.info(f"Found {len(frames)} frames")
+ if not frames:
+ logger.error("No frames found, nothing to do")
+ return 1
+ logger.info(
+ f"Oldest frame is from: {datetime.fromtimestamp(frames[0].stat().st_mtime)}"
+ )
+ logger.info(
+ f"Newest frame is from: {datetime.fromtimestamp(frames[-1].stat().st_mtime)}"
+ )
+
+ # Create output directory
+ output_dir = Path(args.output_dir).expanduser()
+ output_file = output_dir / f"camera_{args.camera_id}.{datetime.now().strftime('%Y%m%d-%H%M%S')}.mp4"
+ output_dir.mkdir(parents=True, exist_ok=True)
+
+ # Generate timelapse
+ logger.info(f"Generating timelapse: {output_file}")
+ subprocess.run(
+ [
+ "ffmpeg",
+ "-r",
+ args.frame_rate,
+ "-pattern_type",
+ "glob",
+ "-i",
+ f"{str(image_dir)}/camera_{args.camera_id}.*.jpg",
+ "-c:v",
+ "libx264",
+ "-pix_fmt",
+ "yuv420p",
+ str(output_file),
+ ],
+ check=True,
+ )
+
+ # Delete frames if needed
+ if args.delete_frames:
+ logger.info("Deleting frames")
+ for frame in frames:
+ frame.unlink()
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/caddy-se-allow b/scripts/caddy-se-allow
new file mode 100755
index 0000000..95027eb
--- /dev/null
+++ b/scripts/caddy-se-allow
@@ -0,0 +1,5 @@
+#! /bin/bash
+set -ex
+
+sudo chcon -t httpd_sys_content_t $1 -R
+sudo chcon -t httpd_sys_rw_content_t $1 -R
diff --git a/scripts/clippath b/scripts/clippath
new file mode 100755
index 0000000..b170ae9
--- /dev/null
+++ b/scripts/clippath
@@ -0,0 +1,83 @@
+#! /usr/bin/env python
+import pyperclip
+import argparse
+import sys
+import logging
+import pathlib
+import platform
+
+logger = logging.getLogger(__name__)
+
+
+def convert_to_unix(path: str, args: argparse.Namespace) -> str:
+ output = path
+
+ # Switch the slashes to forward slashes
+ output = output.replace("\\", "/")
+
+ # If the path starts with a drive letter, handle it
+ if len(output) >= 2 and output[1] == ":":
+ # Get the drive letter
+ drive_letter = output[0]
+
+ # Strip the front of the path
+ output = output[2:]
+
+ # Add the mount point
+ output = f"{args.drive_letter_mount}/{drive_letter}{output}"
+
+ return output
+
+
+def convert_to_windows(path: str, args: argparse.Namespace) -> str:
+ output = path
+
+ # Flip the path separators
+ output = output.replace("/", "\\")
+
+ return output
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="clippath", description="Manipulates file paths in the clipboard"
+ )
+ ap.add_argument(
+ "--destination-format", "-d", help="Destination format", choices=["windows", "unix"], default="windows"
+ )
+ ap.add_argument("--drive-letter-mount", "-m", help="Mount point for drive letters", default="/mnt")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Read from clipboard
+ clipboard = pyperclip.paste()
+
+ # Convert to the desired format
+ if args.destination_format == "windows":
+ converted = convert_to_windows(clipboard, args)
+ elif args.destination_format == "unix":
+ converted = convert_to_unix(clipboard, args)
+ else:
+ logger.error("Invalid destination format")
+ return 1
+
+ # Put the new path back into the clipboard
+ converted = converted.replace("\n", "").strip()
+ pyperclip.copy(converted)
+ logger.info("New path copied to clipboard")
+ logger.info(converted)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/configure-gnome-remote-desktop b/scripts/configure-gnome-remote-desktop
new file mode 100755
index 0000000..cd39885
--- /dev/null
+++ b/scripts/configure-gnome-remote-desktop
@@ -0,0 +1,23 @@
+#! /bin/bash
+# This script will do the work needed to configure Gnome Remote Desktop headlessly.
+# Useful if you need RDP access to a remote machine and forgot to configure it beforehand
+set -e
+
+# Make the keys dir
+KEYS_DIR=~/.config/gnome-remote-desktop/keys
+mkdir -p $KEYS_DIR
+
+# Generate keys
+openssl genrsa -out $KEYS_DIR/tls.key 4096
+openssl req -new -key $KEYS_DIR/tls.key -out $KEYS_DIR/tls.csr
+openssl x509 -req -days 36500 -signkey $KEYS_DIR/tls.key -in $KEYS_DIR/tls.csr -out $KEYS_DIR/tls.crt
+
+# Set the certificates
+grdctl rdp set-tls-cert $KEYS_DIR/tls.crt
+grdctl rdp set-tls-key $KEYS_DIR/tls.key
+grdctl rdp disable-view-only
+
+# Inform of what to run next
+echo -e "To finish GRD RDP setup, run:\
+\n\tgrdctl rdp set-credentials \
+\n\tgrdctl rdp enable"
\ No newline at end of file
diff --git a/scripts/deadline-gpu-inventory b/scripts/deadline-gpu-inventory
new file mode 100755
index 0000000..5246273
--- /dev/null
+++ b/scripts/deadline-gpu-inventory
@@ -0,0 +1,100 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import logging
+import pyperclip
+import re
+
+logger = logging.getLogger(__name__)
+
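+# Regexes that pull GPU model names out of Deadline machine info text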
+GPU_FILTERS = [
+ re.compile(r"(Intel\(R\)[a-zA-Z\d ]+)"),
+ re.compile(r"(NVIDIA [a-zA-Z\d ]+)"),
+]
+
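+# Approximate MSRP per GPU model, used when --guess-cost is enabled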
+MSRP_ESTIMATES = {
+ "NVIDIA RTX A5000": 5400,
+ "NVIDIA RTX A4000": 1600,
+ "NVIDIA Quadro M5000": 2000,
+ "NVIDIA Geforce RTX 2070": 800
+}
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog='deadline-gpu-inventory', description='Reads Deadline machine info from the clipboard and displays the GPU inventory')
+ ap.add_argument("--format", "-f", help="Output format", choices=["pretty", "tsv", "csv"], default="pretty")
+ ap.add_argument("--guess-cost", "-g", help="Guess the cost of the GPUs", action="store_true")
+ ap.add_argument('-v', '--verbose', help='Enable verbose logging', action='store_true')
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format='%(levelname)s: %(message)s',
+ )
+
+ # Read from clipboard
+ clipboard = pyperclip.paste()
+
+ # Split clipboard into lines
+ lines = clipboard.splitlines()
+
+ # Search for GPUs
+ gpus = []
+ for line in lines:
+ for gpu_filter in GPU_FILTERS:
+ # Find any matches
+ match = gpu_filter.search(line)
+ if match:
+ # Add to list of GPUs
+ gpus.append(match.group(1))
+
+ # If we don't have any GPUs, exit
+ if not gpus:
+ logger.error("No GPUs found.")
+ logger.info("Are you sure you have copied the machine info to the clipboard?")
+ return 1
+
+ # Reshape the list into [(gpu, count)]
+ gpu_counts = {}
+ for gpu in gpus:
+ gpu_counts[gpu] = gpu_counts.get(gpu, 0) + 1
+ max_count = max(gpu_counts.values())
+ gpu_counts = list(gpu_counts.items())
+
+ # Sort the list by count
+ gpu_counts.sort(key=lambda x: x[1], reverse=True)
+
+ if args.format == "pretty":
+ # Figure out the width of the largest count
+ count_width = len(str(max_count))
+
+ # Print the list
+ for gpu, count in gpu_counts:
+ print(f"{count:>{count_width}} {gpu}")
+
+ elif args.format == "tsv":
+ # Print the list
+ for gpu, count in gpu_counts:
+ print(f"{count}\t{gpu}")
+
+ elif args.format == "csv":
+ # Print the list
+ print("Count,GPU")
+ for gpu, count in gpu_counts:
+ print(f"{count},{gpu}")
+
+ # If cost guessing is enabled, print the estimated cost
+ if args.guess_cost:
+ total_cost = 0
+ known_cost_gpu_count = 0
+ for gpu, count in gpu_counts:
+ if gpu in MSRP_ESTIMATES:
+ total_cost += MSRP_ESTIMATES[gpu] * count
+ known_cost_gpu_count += 1
+ print(f"Estimated cost: ${total_cost}. ({known_cost_gpu_count} models included in estimate)")
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/deadline-read-options b/scripts/deadline-read-options
new file mode 100755
index 0000000..fb622c5
--- /dev/null
+++ b/scripts/deadline-read-options
@@ -0,0 +1,44 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import configparser
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ description="Nicely lists the options stored in a Deadline .options file"
+ )
+ ap.add_argument("options_file", type=str, help="The .options file to read")
+ ap.add_argument(
+ "--show-types", action="store_true", help="Show the type of each option"
+ )
+ ap.add_argument(
+ "--show-defaults",
+ action="store_true",
+ help="Show the default value of each option",
+ )
+ args = ap.parse_args()
+
+ # Read the file
+ with open(args.options_file, "r") as f:
+ config_string = f.read()
+
+ # Parse the file
+ config = configparser.ConfigParser()
+ config.read_string(config_string)
+
+ # List each option
+ for section in config.sections():
+ required = config[section].getboolean("Required", fallback=False)
+ print(f"{section}{'*' if required else ''}: {config[section].get('Description', 'No description')}")
+ if args.show_types:
+ print(f" Type: {config[section].get('Type', 'No type')}")
+ if args.show_defaults and "Default" in config[section]:
+ print(f" Default: {config[section]['Default']}")
+
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/ewp-secrets b/scripts/ewp-secrets
new file mode 100755
index 0000000..3e2faa0
--- /dev/null
+++ b/scripts/ewp-secrets
@@ -0,0 +1,182 @@
+#! /usr/bin/env python3
+"""Evan's Secrets tool
+
+This aims to wrap the different secret management tools used on systems I work with.
+For now, this only targets `secret-tool`, but I plan to add more in the future.
+"""
+
+import argparse
+import sys
+import logging
+import shutil
+import subprocess
+import sqlite3
+from pathlib import Path
+from typing import Optional
+from abc import ABC, abstractmethod
+
+logger = logging.getLogger(__name__)
+
+__all__ = ["EwpSecrets"]
+
+
+class __SecretManager(ABC):
+ @abstractmethod
+ def runs_on_this_system(self) -> bool: ...
+
+ @abstractmethod
+ def store(self, namespace: str, key: str, secret: str): ...
+
+ @abstractmethod
+ def load(self, namespace: str, key: str) -> Optional[str]: ...
+
+
+class GnomeKeyringSM(__SecretManager):
+
+ def runs_on_this_system(self) -> bool:
+ return shutil.which("secret-tool") is not None
+
+ def store(self, namespace: str, key: str, secret: str):
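+ # Pipe the secret to secret-tool over stdin so it never appears in the process arguments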
+ process = subprocess.Popen(
+ [
+ "secret-tool",
+ "store",
+ "--label",
+ "Secret stored by ewp-secrets",
+ namespace,
+ key,
+ ],
+ stdin=subprocess.PIPE,
+ )
+ process.communicate(input=secret.encode())
+ process.wait()
+
+ def load(self, namespace: str, key: str) -> Optional[str]:
+ try:
+ process = subprocess.run(
+ ["secret-tool", "lookup", namespace, key],
+ check=True,
+ capture_output=True,
+ )
+ return process.stdout.decode()
+ except subprocess.CalledProcessError:
+ return None
+
+
+class FilesystemSM(__SecretManager):
+
+ def __init__(
+ self,
+ storage_path: Path = Path("~/.config/ewp-secrets/storage.sqlite3").expanduser(),
+ ):
+ # If the file doesn't exist, create it and restrict access
+ if not storage_path.exists():
+ storage_path.parent.mkdir(parents=True, exist_ok=True)
+ storage_path.touch()
+ storage_path.chmod(0o600)
+
+ self.conn = sqlite3.connect(storage_path)
+ self.conn.execute(
+ """CREATE TABLE IF NOT EXISTS secrets (
+ namespace TEXT,
+ key TEXT,
+ secret TEXT,
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ accessed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY (namespace, key)
+ )
+ """
+ )
+
+ def runs_on_this_system(self) -> bool:
+ return True
+
+ def store(self, namespace: str, key: str, secret: str):
+ # Upsert: insert the secret, or update the existing row and bump updated_at
+ self.conn.execute(
+ """INSERT INTO secrets (namespace, key, secret)
+ VALUES (?, ?, ?)
+ ON CONFLICT (namespace, key) DO UPDATE SET
+ (secret, updated_at) = (?, CURRENT_TIMESTAMP)
+ """,
+ (namespace, key, secret, secret),
+ )
+ self.conn.commit()
+
+ def load(self, namespace: str, key: str) -> Optional[str]:
+ cursor = self.conn.execute(
+ """SELECT secret FROM secrets WHERE namespace = ? AND key = ?""",
+ (namespace, key),
+ )
+ result = cursor.fetchone()
+ if result:
+ self.conn.execute(
+ """UPDATE secrets SET accessed_at = CURRENT_TIMESTAMP
+ WHERE namespace = ? AND key = ?""",
+ (namespace, key),
+ )
+ self.conn.commit()
+ return result[0]
+ return None
+
+
+class EwpSecrets:
+ def __init__(self):
+ all_secret_managers = [GnomeKeyringSM(), FilesystemSM()]
+ self.secret_managers = [
+ sm for sm in all_secret_managers if sm.runs_on_this_system()
+ ]
+ assert self.secret_managers, "No secret managers available on this system"
+
+ def store(self, namespace: str, key: str, secret: str):
+ # Only write to the first (best) secret manager
+ self.secret_managers[0].store(namespace, key, secret)
+
+ def load(self, namespace: str, key: str) -> Optional[str]:
+ # Try to read from each secret manager until we find the secret
+ for sm in self.secret_managers:
+ secret = sm.load(namespace, key)
+ if secret:
+ return secret
+ return None
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="ewp-secrets", description="Store and load secrets"
+ )
+ ap.add_argument("action", help="Action to perform", choices=["store", "load"])
+ ap.add_argument(
+ "-n", "--namespace", help="Namespace to store secrets in", required=True
+ )
+ ap.add_argument("-k", "--key", help="Key to store secret under")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Access the secret manager
+ secrets = EwpSecrets()
+
+ # Perform the requested action
+ if args.action == "store":
+ secret = input("Enter the secret: ")
+ secrets.store(args.namespace, args.key, secret)
+ return 0
+ elif args.action == "load":
+ secret = secrets.load(args.namespace, args.key)
+ if secret:
+ print(secret)
+ return 0
+ else:
+ print("No secret found", file=sys.stderr)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/ewp-sendmail b/scripts/ewp-sendmail
new file mode 100755
index 0000000..3dcdc13
--- /dev/null
+++ b/scripts/ewp-sendmail
@@ -0,0 +1,66 @@
+#! /usr/bin/env python3
+import smtplib
+import getpass
+from email.message import EmailMessage
+import argparse
+import sys
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="ewp-sendmail", description="Easily send real emails"
+ )
+ ap.add_argument(
+ "--recipient",
+ "--to",
+ help="Email address of the recipient",
+ default="evan@ewpratten.com",
+ )
+ ap.add_argument("--cc", help="Email addresses to CC", nargs="+")
+ ap.add_argument("--subject", "-s", help="Subject of the email")
+ ap.add_argument(
+ "--from",
+ help="Sender of the email",
+ default="system-reports@ewpratten.com",
+ dest="sender",
+ )
+ ap.add_argument(
+ "--password",
+ help="Password to use for sending the email.",
+ )
+ args = ap.parse_args()
+
+ # Read the body from stdin
+ print("Enter the body of the email. Press Ctrl+D when done.")
+ body = sys.stdin.read()
+
+ # Read the password
+ password = args.password or getpass.getpass(f"Password for {args.sender}: ")
+
+ # Log in to the SMTP server
+ print("Connecting to SMTP server...")
+ smtp = smtplib.SMTP("smtp.ewpratten.com", 587)
+ smtp.ehlo()
+ smtp.starttls()
+ print("Authenticating...")
+ smtp.login(args.sender, password)
+ print("Sending email...")
+ # Create the email
+ msg = EmailMessage()
+ msg.set_content(body)
+ msg["Subject"] = args.subject
+ msg["From"] = args.sender
+ msg["To"] = args.recipient
+ if args.cc:
+ msg["Cc"] = ",".join(args.cc)
+ # Send the email
+ smtp.send_message(msg)
+ print("Done.")
+ smtp.quit()
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/ezlink b/scripts/ezlink
new file mode 100755
index 0000000..13ae9f6
--- /dev/null
+++ b/scripts/ezlink
@@ -0,0 +1,60 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import logging
+import subprocess
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog="ezlink", description="Easier symlink tool")
+ ap.add_argument("pointer", help="Link that points to the destination", type=Path)
+ ap.add_argument("destination", help="Destination of the link", type=Path)
+ ap.add_argument(
+ "-f", "--force", help="Force the link to be created", action="store_true"
+ )
+ ap.add_argument(
+ "--hard", help="Link directly to the destination inode", action="store_true"
+ )
+ ap.add_argument("--absolute", "-a", help="Use absolute paths", action="store_true")
+ ap.add_argument(
+ "--dereference-destination",
+ help="Follow the destination if it is also a pointer",
+ action="store_true",
+ )
+ ap.add_argument(
+ "--dry-run", help="Don't actually create the link", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Convert to absolute paths if requested
+ if args.absolute:
+ args.pointer = args.pointer.absolute()
+ args.destination = args.destination.absolute()
+
+ # Construct the appropriate LN command
+ command = ["ln"]
+ if not args.dereference_destination:
+ command.append("-n")
+ if not args.hard:
+ command.append("-s")
+ if args.force:
+ command.append("-f")
+ command.append(str(args.destination))
+ command.append(str(args.pointer))
+
+ # Print the command
+ print(" ".join(command))
+
+ # Run the command if not a dry run
+ if not args.dry_run:
+ return subprocess.run(command).returncode
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/ezlink3 b/scripts/ezlink3
new file mode 100755
index 0000000..627dea3
--- /dev/null
+++ b/scripts/ezlink3
@@ -0,0 +1,4 @@
+#! /bin/bash
+set -e
+
+python3 "$(which ezlink)" "$@"
\ No newline at end of file
diff --git a/scripts/fetch-steamdeck-screenshots b/scripts/fetch-steamdeck-screenshots
new file mode 100755
index 0000000..2e758e7
--- /dev/null
+++ b/scripts/fetch-steamdeck-screenshots
@@ -0,0 +1,38 @@
+#! /bin/bash
+set -e
+
+# If no arguments are given, print usage and exit
+if [ $# -eq 0 ]; then
+ echo "Usage: $0 [user@]hostname"
+ exit 1
+fi
+
+# Prepare the script to run on the remote host
+cat << EOF > /tmp/steamdeck-screenshot.sh
+#!/bin/bash
+set -e
+
+# Clean the screenshots directory
+rm -rf /tmp/screenshot-bundle || true
+mkdir -p /tmp/screenshot-bundle
+
+# Copy all screenshots to the bundle directory
+IMAGES=\$(find /home/deck/.local/share/Steam/userdata | grep "screenshots/[0-9]")
+for pathname in \$IMAGES; do
+ echo "Copying \$pathname"
+ cp --preserve=timestamps "\$pathname" /tmp/screenshot-bundle
+done
+
+# Compress the bundle directory
+tar -czf /tmp/screenshot-bundle.tar.gz /tmp/screenshot-bundle
+EOF
+
+# Copy the script to the remote host
+scp /tmp/steamdeck-screenshot.sh $1:/tmp/steamdeck-screenshot.sh
+
+# Run the script on the remote host
+ssh $1 bash /tmp/steamdeck-screenshot.sh
+
+# Copy the bundle from the remote host
+TIMESTAMP=$(date +%Y%m%d-%H%M%S)
+scp $1:/tmp/screenshot-bundle.tar.gz ~/Downloads/steamdeck_screenshots_${TIMESTAMP}.tar.gz
\ No newline at end of file
diff --git a/scripts/git-authors-multirepo b/scripts/git-authors-multirepo
new file mode 100755
index 0000000..88f6866
--- /dev/null
+++ b/scripts/git-authors-multirepo
@@ -0,0 +1,63 @@
+#! /usr/bin/env python
+import subprocess
+import argparse
+import sys
+from pathlib import Path
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ description="git authors, but for multiple repos at once"
+ )
+ ap.add_argument(
+ "--start",
+ help="Directory to start walking from",
+ default=Path("."),
+ type=Path,
+ )
+ ap.add_argument("--log-repos", help="Log the repos found", action="store_true")
+ args = ap.parse_args()
+
+ # Find every subdirectory that is a git repo
+ git_dirs = []
+ all_dirs_recursive = list(args.start.glob("**/*.git"))
+ for path in all_dirs_recursive:
+ git_dirs.append(path.parent)
+ if args.log_repos:
+ print(f"Reading GIT repo at: {path.parent}")
+
+ # Collect the results of `git authors` from each repo
+ authors = []
+ for git_dir in git_dirs:
+ output = subprocess.check_output(["git", "authors"], cwd=git_dir)
+ lines = output.split(b"\n")
+ for line in lines:
+ try:
+ line = line.decode("utf-8")
+ except UnicodeDecodeError:
+ continue
+ if line and len(line.split("\t")) > 1:
+ commits, author = line.split("\t", 1)
+ authors.append((int(commits.strip()), author))
+
+ # Combine the results
+ combined = {}
+ for author in authors:
+ if author[1] not in combined:
+ combined[author[1]] = 0
+ combined[author[1]] += author[0]
+
+ # Convert back to a list
+ authors = [(combined[author], author) for author in combined]
+ authors.sort(reverse=True)
+
+ # Print
+ for author in authors:
+ print(f"{author[0]}\t{author[1]}")
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/git-authors-recursive b/scripts/git-authors-recursive
new file mode 100755
index 0000000..71494c8
--- /dev/null
+++ b/scripts/git-authors-recursive
@@ -0,0 +1,20 @@
+#! /bin/bash
+
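+# Tally "git authors" output across every submodule and print a combined, sorted count per author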
+git submodule foreach git authors | grep -v "^Entering" | python3 -c '
+
+import sys
+lines = sys.stdin.read().splitlines()
+stats = {}
+for line in lines:
+ count, author = line.lstrip().split("\t")
+ if author not in stats:
+ stats[author] = 0
+ stats[author] += int(count)
+
+stats = list(stats.items())
+stats.sort(key=lambda s: s[1], reverse=True)
+
+for author, count in stats:
+ print(f"{count}\t{author}")
+
+'
diff --git a/scripts/git-detect-fake-authors b/scripts/git-detect-fake-authors
new file mode 100755
index 0000000..5db63e8
--- /dev/null
+++ b/scripts/git-detect-fake-authors
@@ -0,0 +1,6 @@
+#! /bin/bash
+# This script finds every time someone makes a commit under someone else's name
+# NOTE: This includes co-authoring
+set -e
+
+git-log-sqlite -q 'SELECT committer as Real, author as Fake, count(*) as Count FROM commits WHERE author NOT LIKE committer GROUP BY author, committer ORDER BY count DESC;'
\ No newline at end of file
diff --git a/scripts/git-log-sqlite b/scripts/git-log-sqlite
new file mode 100755
index 0000000..d10782f
--- /dev/null
+++ b/scripts/git-log-sqlite
@@ -0,0 +1,182 @@
+#! /usr/bin/env python3
+import subprocess
+import sqlite3
+import argparse
+import sys
+import logging
+from pathlib import Path
+from typing import Dict
+
+logger = logging.getLogger(__name__)
+
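+# Mapping of git log format placeholders to the SQLite column names they populate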
+FIELDS = {
+ "ct": "timestamp",
+ "aN": "author",
+ "aE": "email",
+ "cN": "committer",
+ "cE": "committer_email",
+ "s": "subject",
+ "b": "body",
+ "N": "notes",
+}
+
+
+def read_properties() -> Dict[str, Dict[str, str]]:
+ output = {}
+ for field in FIELDS:
+ # Construct the log request
+ format_str = f"%H %{field}%x00"
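+ # Each record is "<hash> <value>", NUL-terminated so multi-line values (e.g. commit bodies) survive splitting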
+
+ # Get the results
+ repo_results = subprocess.run(
+ ["git", "log", f"--format=format:{format_str}"],
+ capture_output=True,
+ text=True,
+ ).stdout
+ submodule_results = subprocess.run(
+ [
+ "git",
+ "submodule",
+ "foreach",
+ "git",
+ "log",
+ f"--format=format:{format_str}",
+ ],
+ capture_output=True,
+ text=True,
+ ).stdout
+
+ # Parse the results
+ all_results = repo_results + submodule_results
+ all_results = all_results.split("\x00")
+ for result in all_results:
+ if " " not in result or result == "":
+ continue
+ commit_hash, value = result.split(" ", 1)
+ if commit_hash.startswith("Entering"):
+ continue
+ if commit_hash.startswith("\n"):
+ commit_hash = commit_hash[1:]
+ if commit_hash not in output:
+ output[commit_hash] = {}
+ output[commit_hash][field] = value
+
+ return output
+
+
+def create_table(cursor: sqlite3.Cursor) -> None:
+ sql = "CREATE TABLE IF NOT EXISTS commits (hash TEXT PRIMARY KEY, "
+ for field in FIELDS.values():
+ ty = "TEXT"
+ if field == "timestamp":
+ ty = "INTEGER"
+ if field == "hash":
+ ty = "TEXT PRIMARY KEY"
+
+ sql += f"{field} {ty}, "
+ sql = sql[:-2] + ")"
+ logger.debug(f"Creating table with SQL: {sql}")
+ cursor.execute(sql)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="git-log-sqlite", description="Interact with the git log using SQL"
+ )
+ ap.add_argument(
+ "--dump",
+ help="Path to a sqlite3 database file to dump contents to. DELETES EXISTING FILE",
+ type=Path,
+ )
+ ap.add_argument(
+ "--interactive",
+ "-i",
+ help="Start an interactive SQL session",
+ action="store_true",
+ )
+ ap.add_argument("--query", "-q", help="Run a query and print the results")
+ ap.add_argument("--no-header", help="Do not print the header", action="store_true")
+ ap.add_argument("--mode", help="Set the mode for the sqlite3 command", default="table")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Interactive mode and query mode are mutually exclusive
+ if args.interactive and args.query:
+ logger.error("Interactive mode and query mode are mutually exclusive")
+ return 1
+
+ # If the user didn't specify anything, print the help message
+ if not (args.interactive or args.query):
+ ap.print_help()
+ return 1
+
+ # Read the properties
+ commits = read_properties()
+ logger.debug(f"Read {len(commits)} commits")
+
+ # Open a connection to the database
+ if args.dump:
+ args.dump.parent.mkdir(parents=True, exist_ok=True)
+ args.dump.unlink(missing_ok=True)
+ conn = sqlite3.connect(args.dump if args.dump else ":memory:")
+ cursor = conn.cursor()
+
+ # Create a table to store the data
+ create_table(cursor)
+
+ # Insert the data into the table
+ rows = list(commits.items())
+ rows.sort(key=lambda x: x[1]["ct"])
+ for commit_hash, data in rows:
+ sql = "INSERT INTO commits VALUES (" + ",".join(["?"] * (len(FIELDS) + 1)) + ")"
+ values = [commit_hash] + [data.get(field, None) for field in FIELDS.keys()]
+ cursor.execute(sql, values)
+
+ # Commit the changes
+ conn.commit()
+
+ # If just dumping, we are done
+ if args.dump:
+ conn.close()
+ return 0
+
+ # Dump to a temp file
+ import tempfile
+
+ temp_file = Path(tempfile.mkstemp()[1])
+ temp_conn = sqlite3.connect(temp_file)
+ temp_conn.executescript("\n".join(conn.iterdump()))
+ temp_conn.commit()
+ conn.close()
+
+ # Build the base sqlite command
+ sqlite_cmd = ["sqlite3", "--cmd", f".mode {args.mode}"]
+ if not args.no_header:
+ sqlite_cmd.append("--cmd")
+ sqlite_cmd.append(".headers on")
+
+ # If running a query, do so
+ if args.query:
+ subprocess.run(sqlite_cmd + [temp_file, args.query])
+
+ # If running interactively, do so
+ if args.interactive:
+ subprocess.run(sqlite_cmd + ["--interactive", temp_file])
+
+ # Delete the temp file
+ temp_file.unlink()
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/github-to-trello b/scripts/github-to-trello
new file mode 100755
index 0000000..6cf0063
--- /dev/null
+++ b/scripts/github-to-trello
@@ -0,0 +1,176 @@
+#! /usr/bin/env python3
+
+# fmt:off
+import sys
+import os
+from pathlib import Path
+sys.path.append((Path(os.environ["EWCONFIG_ROOT"]) / "python_modules").as_posix())
+# fmt:on
+
+import argparse
+import sys
+import logging
+import requests
+from pathlib import Path
+from dataclasses import dataclass, field
+from typing import List, Optional, Dict, Any
+from ewconfig.secret_manager import get_semi_secret_string
+from ewconfig.trello import TRELLO_API_KEY, get_trello_api_token
+from ewconfig.trello.cards import get_all_trello_cards, create_card, add_attachment
+from ewconfig.trello.boards import PERSONAL_TASKS_BOARD
+
+logger = logging.getLogger(__name__)
+
+GITHUB_API_VERSION = "2022-11-28"
+GITHUB_PAT = get_semi_secret_string("github_pat", namespace="trello-sync")
+TRELLO_API_TOKEN = get_trello_api_token()
+
+
+def get_all_issues() -> List[Dict[str, Any]]:
+ issues = []
+
+ # Get all issues assigned to me
+ response = requests.get(
+ "https://api.github.com/issues",
+ headers={
+ "Authorization": f"token {GITHUB_PAT}",
+ "Accept": "application/vnd.github.raw+json",
+ "X-GitHub-Api-Version": GITHUB_API_VERSION,
+ },
+ params={"state": "open", "per_page": 100},
+ )
+ response.raise_for_status()
+ issues.extend(response.json())
+
+ # Get all issues that mention me
+ response = requests.get(
+ "https://api.github.com/user/issues",
+ headers={
+ "Authorization": f"token {GITHUB_PAT}",
+ "Accept": "application/vnd.github.raw+json",
+ "X-GitHub-Api-Version": GITHUB_API_VERSION,
+ },
+ params={"state": "open", "per_page": 100, "filter": "mentioned"},
+ )
+ response.raise_for_status()
+ issues.extend(response.json())
+
+ # Get all issues that exist in my repos
+ response = requests.get(
+ "https://api.github.com/user/issues",
+ headers={
+ "Authorization": f"token {GITHUB_PAT}",
+ "Accept": "application/vnd.github.raw+json",
+ "X-GitHub-Api-Version": GITHUB_API_VERSION,
+ },
+ params={"state": "open", "per_page": 100, "filter": "repos"},
+ )
+ response.raise_for_status()
+ issues.extend(response.json())
+
+ # Get all issues that I have made in other people's repos
+ response = requests.get(
+ "https://api.github.com/user/issues",
+ headers={
+ "Authorization": f"token {GITHUB_PAT}",
+ "Accept": "application/vnd.github.raw+json",
+ "X-GitHub-Api-Version": GITHUB_API_VERSION,
+ },
+ params={"state": "open", "per_page": 100, "filter": "subscribed"},
+ )
+ response.raise_for_status()
+ issues.extend(response.json())
+
+ # De-dupe issues
+ issues = list({issue["id"]: issue for issue in issues}.values())
+
+ return issues
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog="github-to-trello", description="Syncs GitHub issues to Trello cards")
+ ap.add_argument("--dry-run", help="Don't actually do anything", action="store_true")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Get a list of all issues assigned to me
+ my_issues = get_all_issues()
+ logger.info(f"Found {len(my_issues)} issues assigned to me")
+
+ # Get all cards on the personal tasks board
+ trello_cards = get_all_trello_cards(
+ board_id=PERSONAL_TASKS_BOARD.id,
+ api_key=TRELLO_API_KEY,
+ api_token=TRELLO_API_TOKEN,
+ )
+ logger.info(f"Found {len(trello_cards)} cards in Trello")
+
+ # Handle each GitHub issue
+ for issue in my_issues:
+ # Ignore archived repos
+ if issue["repository"]["archived"]:
+ logger.info(f"Ignoring archived repo: {issue['repository']['full_name']}")
+ continue
+
+ # Ignore anything by dependabot
+ if issue["user"]["login"] == "dependabot[bot]":
+ logger.debug(f"Ignoring dependabot issue: {issue['repository']['full_name']}#{issue['number']}")
+ continue
+
+ # Search each card for anything that links to the github issue
+ for card in trello_cards:
+ if issue["html_url"] in card["desc"]:
+ logger.info(
+ f"Found GitHub Issue {issue['number']} in Trello Card {card['id']}"
+ )
+ break
+ else:
+ logger.info(
+ f"Going to create trello card for GitHub Issue: [{issue['repository']['full_name']}] {issue['title']}"
+ )
+ if not args.dry_run:
+ # Check if this is an issue or pr
+ is_pr = "pull_request" in issue
+ type_label = (
+ PERSONAL_TASKS_BOARD.tags["Github: Pull Request"]
+ if is_pr
+ else PERSONAL_TASKS_BOARD.tags["Github: Issue"]
+ )
+
+ # Create a new trello card for this issue
+ card_id = create_card(
+ list_id=PERSONAL_TASKS_BOARD.lists["To Do"],
+ name=f"[{issue['repository']['full_name']}] {issue['title']}",
+ description=(
+ f"**GitHub Link:** [`{issue['repository']['full_name']}#{issue['number']}`]({issue['html_url']})\n\n"
+ f"**Author:** [`{issue['user']['login']}`]({issue['user']['html_url']})\n\n"
+ "---"
+ ),
+ label_ids=[type_label],
+ api_key=TRELLO_API_KEY,
+ api_token=TRELLO_API_TOKEN,
+ )
+ add_attachment(
+ card_id=card_id,
+ api_key=TRELLO_API_KEY,
+ api_token=TRELLO_API_TOKEN,
+ url=issue["html_url"],
+ )
+ logger.info(
+ f"Created Trello Card {card_id} for GitHub Issue {issue['repository']['full_name']}#{issue['number']}"
+ )
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/gitweb-fix-modification-date b/scripts/gitweb-fix-modification-date
new file mode 100755
index 0000000..5924675
--- /dev/null
+++ b/scripts/gitweb-fix-modification-date
@@ -0,0 +1,7 @@
+#! /bin/bash
+set -e
+
+LAST_MODIFIED_FILE="$(git rev-parse --git-dir)"/info/web/last-modified
+
+mkdir -p "$(dirname "$LAST_MODIFIED_FILE")"
+git for-each-ref --sort=-authordate --count=1 --format='%(authordate:iso8601)' >"$LAST_MODIFIED_FILE"
\ No newline at end of file
diff --git a/scripts/gp-upload b/scripts/gp-upload
new file mode 100755
index 0000000..7cd8e8d
--- /dev/null
+++ b/scripts/gp-upload
@@ -0,0 +1,217 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import requests
+import socket
+import urllib.parse
+from pathlib import Path
+from typing import Optional
+
+logger = logging.getLogger(__name__)
+
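+# OAuth client settings for the Google Photos Library API (append-only upload scope)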
+G_CLIENT_ID = "107923498573-ruh1uhkfe1t5f18vam6sckq7pqer1vmg.apps.googleusercontent.com"
+G_SCOPES = ["https://www.googleapis.com/auth/photoslibrary.appendonly"]
+G_REDIRECT_URI = "http://localhost:7842"
+
+
+def get_google_oauth_token() -> Optional[str]:
+ """Either log the user in, or used a stored refresh token to get an OAuth token"""
+ refresh_token_path = Path("~/.config/gp-upload/refresh-token").expanduser()
+ client_secret_path = Path("~/.config/gp-upload/client-secret").expanduser()
+
+ # Read the client secret
+ with client_secret_path.open("r") as f:
+ client_secret = f.read().strip()
+
+ # Check if we have a refresh token
+ if refresh_token_path.exists():
+ logger.info("Using stored refresh token")
+
+ # Read the refresh token
+ with refresh_token_path.open("r") as f:
+ refresh_token = f.read().strip()
+
+ # Make the request
+ response = requests.post(
+ "https://oauth2.googleapis.com/token",
+ data={
+ "client_id": G_CLIENT_ID,
+ "grant_type": "refresh_token",
+ "refresh_token": refresh_token,
+ "client_secret": client_secret,
+ },
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error("Failed to get OAuth token")
+ logger.error(response.text)
+ return None
+
+ # Return the OAuth token
+ return response.json()["access_token"]
+
+ # Otherwise, log the user in
+ else:
+ logger.info("Logging user in")
+
+ # Direct the user to Google's login page
+ logger.info("Please visit the following URL to log in:")
+ logger.info(
+ f"https://accounts.google.com/o/oauth2/v2/auth?client_id={G_CLIENT_ID}&response_type=code&scope={'+'.join(G_SCOPES)}&redirect_uri={G_REDIRECT_URI}&access_type=offline&prompt=consent"
+ )
+
+ # Open a TCP server to listen for the redirect
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.bind(("localhost", 7842))
+ s.listen()
+
+ # Wait for the redirect
+ conn, addr = s.accept()
+ with conn:
+ # Read the request
+ request = conn.recv(1024).decode("utf-8")
+
+ # Parse the request
+ request = request.splitlines()
+ request = [line for line in request if line.startswith("GET")]
+ request = request[0].split(" ")[1]
+ request = request.split("?")[1]
+ request = request.split("&")
+ request = {key: urllib.parse.unquote(value) for key, value in [pair.split("=") for pair in request]}
+
+ # Check for errors
+ if "error" in request:
+ logger.error(f"Failed to log in: {request['error']}")
+ conn.sendall(b"HTTP/1.1 500 Internal Server Error\n\nFailed to log in")
+ conn.close()
+ return None
+
+ # Return a message to the user and close the socket
+ conn.sendall(b"HTTP/1.1 200 OK\n\nSuccess!")
+ conn.close()
+
+ # Make the request
+ response = requests.post(
+ "https://oauth2.googleapis.com/token",
+ data={
+ "client_id": G_CLIENT_ID,
+ "code": request["code"],
+ "grant_type": "authorization_code",
+ "redirect_uri": G_REDIRECT_URI,
+ "client_secret": client_secret,
+ },
+ )
+ logger.info(f"Response: {response.text}")
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error("Failed to get OAuth token")
+ logger.error(response.text)
+ return None
+ access_token = response.json()["access_token"]
+ refresh_token = response.json()["refresh_token"]
+
+ # Save the refresh token
+ refresh_token_path.parent.mkdir(parents=True, exist_ok=True)
+ with refresh_token_path.open("w") as f:
+ f.write(refresh_token)
+
+ # Return the OAuth token
+ return access_token
+
+
+
+def upload_file(file: Path, oauth_token: str):
+ # Read the file
+ with file.open("rb") as f:
+ file_data = f.read()
+
+ # Make the upload request
+ logger.info("Creating new upload")
+ response = requests.post(
+ "https://photoslibrary.googleapis.com/v1/uploads",
+ headers={
+ "Authorization": f"Bearer {oauth_token}",
+ "Content-type": "application/octet-stream",
+ "X-Goog-Upload-File-Name": file.name,
+ "X-Goog-Upload-Protocol": "raw",
+ },
+ data=file_data,
+ )
+ logger.info(f"Uploaded {file.stat().st_size} bytes")
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error(f"Failed to upload: {file}")
+ logger.error(response.text)
+ return None
+
+ # Get the upload token
+ upload_token = response.text
+ logger.info(f"Upload token: {upload_token}")
+
+ # Create the media item
+ logger.info("Creating new media item")
+ response = requests.post(
+ "https://photoslibrary.googleapis.com/v1/mediaItems:batchCreate",
+ headers={
+ "Authorization": f"Bearer {oauth_token}",
+ "Content-type": "application/json",
+ },
+ json={
+ "newMediaItems": [
+ {
+ "description": "",
+ "simpleMediaItem": {
+ "fileName": file.name,
+ "uploadToken": upload_token,
+ },
+ }
+ ]
+ },
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error(f"Failed to create media item: {file}")
+ logger.error(response.text)
+ return None
+
+ # Log some info about the action
+ for new_item in response.json()["newMediaItemResults"]:
+ if "mediaItem" in new_item:
+ logger.info(f"Created media item: {new_item['mediaItem']['filename']}")
+ logger.info(f"URL: {new_item['mediaItem']['productUrl']}")
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="gp-upload", description="Upload a file to Google Photos"
+ )
+ ap.add_argument("file", help="File to upload")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Authenticate
+ oauth_token = get_google_oauth_token()
+ if oauth_token is None:
+ return 1
+
+ # Upload
+ upload_file(Path(args.file), oauth_token)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/guru-shell b/scripts/guru-shell
new file mode 100755
index 0000000..ede9ec5
--- /dev/null
+++ b/scripts/guru-shell
@@ -0,0 +1,55 @@
+#! /bin/bash
+set -e
+
+# If //qs does not exist
+if [ ! -d "//qs" ]; then
+ echo "This command must be executed on a studio machine!"
+ exit 1
+fi
+
+# Figure out the appropriate prefix
+if [ $(uname -o | grep -c Msys) -gt 0 ]; then
+ s_drive="S:/"
+ pathsep=";"
+else
+ s_drive="//qs/resources"
+ pathsep=":"
+fi
+
+# Ask if we want to use the development env
+echo -n "Do you want to use the development environment? (Y/n)"
+read dev_env
+if [ "$dev_env" == "n" ]; then
+ studio2023_path="studio/studio2023"
+ ps1_mode=""
+else
+ studio2023_path="development/epratten/studio2023"
+ ps1_mode="-dev"
+fi
+
+# Ask if we want to force debugging for all processes
+echo -n "Do you want to force debugging for all processes? (y/N)"
+read force_debug
+if [ "$force_debug" == "y" ]; then
+ export GURU_DEBUG=10
+fi
+
+# Set PYTHONPATH based on the data we learned
+export GURU_PYTHON_ROOT="$s_drive/$studio2023_path"
+export PYTHONPATH="$GURU_PYTHON_ROOT/env$pathsep$PYTHONPATH"
+export PYTHONPATH="$GURU_PYTHON_ROOT$pathsep$PYTHONPATH"
+
+# Remove anything from PYTHONPATH that contains Maya_Shared.
+export PYTHONPATH=$(echo $PYTHONPATH | sed "s/[^$pathsep]*Maya_Shared[^$pathsep]*//g" | sed "s/$pathsep$pathsep/$pathsep/g" | sed "s/^$pathsep//g" | sed "s/$pathsep$//g")
+
+# Update the PATH to point to the studio's install of python
+if [ -d "/c/Programs/software/win/core/python/python_3.7.7" ]; then
+ export PATH="/c/Programs/software/win/core/python/python_3.7.7:$PATH"
+fi
+
+# Configure the prompt
+export PS1_CTX="guru$ps1_mode bash"
+
+# Finally, start bash
+export EWP_IN_GURU_ENVIRONMENT=1
+bash
\ No newline at end of file
diff --git a/scripts/guru-sync-issues b/scripts/guru-sync-issues
new file mode 100755
index 0000000..33216e8
--- /dev/null
+++ b/scripts/guru-sync-issues
@@ -0,0 +1,203 @@
+#! /usr/bin/env python3
+
+# fmt:off
+import sys
+import os
+from pathlib import Path
+sys.path.append((Path(os.environ["EWCONFIG_ROOT"]) / "python_modules").as_posix())
+# fmt:on
+
+import argparse
+import sys
+import logging
+import requests
+import json
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import List, Optional, Dict, Any
+from enum import Enum, auto
+from datetime import datetime
+from ewconfig.secret_manager import get_semi_secret_string
+from ewconfig.trello import TRELLO_API_KEY, get_trello_api_token
+from ewconfig.trello.cards import get_all_trello_cards, create_card
+from ewconfig.trello.boards import PERSONAL_TASKS_BOARD
+
+logger = logging.getLogger(__name__)
+
+GITLAB_PAT = get_semi_secret_string("guru_gitlab_pat", namespace="trello-sync")
+GITLAB_ENDPOINT = "http://gitlab.guru-domain.gurustudio.com/api/v4"
+MY_USER_ID = 64
+TRELLO_API_TOKEN = get_trello_api_token()
+
+TrelloCardId = str
+
+
+class IssueState(Enum):
+ OPEN = "opened"
+ CLOSED = "closed"
+
+
+@dataclass
+class GitLabIssue:
+ title: str
+ issue_id: int
+ global_id: int
+ kind: str
+ state: IssueState
+ created: datetime
+ updated: datetime
+ web_url: str
+ reference_string: str
+ due_date: Optional[datetime] = None
+
+ def get_fmt_id(self) -> str:
+ if self.kind == "merge_request":
+ return f"!{self.global_id}"
+ return f"#{self.global_id}"
+
+ def list_contains_this(self, list_of_ids: List[str]) -> bool:
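+ # True if this issue/MR is referenced in the card's stored ID list (either the bare global ID or the "#"/"!" prefixed form)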
+ if self.kind == "issue" and self.global_id in list_of_ids:
+ return True
+
+ return self.get_fmt_id() in [str(x) for x in list_of_ids]
+
+
+def get_personal_gitlab_issues(user_id: int = MY_USER_ID) -> List[GitLabIssue]:
+ # Make an API call
+ issues = []
+ response = requests.get(
+ f"{GITLAB_ENDPOINT}/issues",
+ params={
+ "assignee_id": user_id,
+ "private_token": GITLAB_PAT,
+ "per_page": 100,
+ "scope": "all",
+ },
+ )
+ response.raise_for_status()
+ issues.extend(response.json())
+ response = requests.get(
+ f"{GITLAB_ENDPOINT}/merge_requests",
+ params={
+ "assignee_id": user_id,
+ "private_token": GITLAB_PAT,
+ "per_page": 100,
+ "scope": "all",
+ "state": "opened",
+ },
+ )
+ response.raise_for_status()
+ issues.extend(response.json())
+
+ # Parse the response
+ output = []
+ for issue in issues:
+ output.append(
+ GitLabIssue(
+ title=issue["title"],
+ issue_id=issue["iid"],
+ global_id=issue["id"],
+ kind=issue.get("type", "merge_request").lower(),
+ state=IssueState(issue["state"]),
+ created=datetime.fromisoformat(issue["created_at"]),
+ updated=datetime.fromisoformat(issue["updated_at"]),
+ web_url=issue["web_url"],
+ reference_string=issue["references"]["full"],
+ due_date=datetime.fromisoformat(issue["due_date"])
+ if issue.get("due_date")
+ else None,
+ )
+ )
+
+ return output
+
+
+def find_or_create_trello_issue_for(
+ trello_cards: List[Dict[str, Any]], gitlab_issue: GitLabIssue, dry_run: bool = False
+) -> TrelloCardId:
+ # Look for a card that matches the issue
+ for card in trello_cards:
+ # Check the first line of the description for metadata
+ description = card["desc"]
+ desc_first_line = description.split("\n")[0]
+ if not desc_first_line.startswith("**Sync Metadata:** "):
+ continue
+
+ # Parse the metadata
+ metadata = json.loads(desc_first_line.split("`")[1])
+
+ # Check if the card matches
+ if metadata.get("ns") == "guru-gitlab" and gitlab_issue.list_contains_this(
+ metadata.get("ids", [])
+ ):
+ logger.debug(f"Matched card labels: {card['labels']}, label IDs: {card['idLabels']}")
+ logger.info(f"Found matching card {card['id']}")
+ return card["id"]
+
+ # Build the description
+ issue_kind = " ".join([part.capitalize() for part in gitlab_issue.kind.split("_")])
+ card_description = "\n\n".join(
+ [
+ f"**Sync Metadata:** `{json.dumps({'ns': 'guru-gitlab', 'ids': [gitlab_issue.get_fmt_id()]})}`",
+ f"**GitLab {issue_kind}:** [`{gitlab_issue.reference_string}`]({gitlab_issue.web_url})\n",
+ "---",
+ ]
+ )
+
+ # Make a new card
+ if not dry_run:
+ return create_card(
+ list_id=PERSONAL_TASKS_BOARD.lists["To Do"],
+ name=gitlab_issue.title,
+ description=card_description,
+ label_ids=[PERSONAL_TASKS_BOARD.tags["GURU"]],
+ position="top",
+ api_key=TRELLO_API_KEY,
+ api_token=TRELLO_API_TOKEN,
+ )
+ else:
+ return "dry-run"
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(description="Syncs issues from GitLab to Trello")
+ ap.add_argument(
+ "--dry-run", help="Don't actually make any changes", action="store_true"
+ )
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Find all issues
+ issues = get_personal_gitlab_issues()
+ logger.info(f"Found {len(issues)} issues")
+
+ # Get a list of cards on the board
+ trello_cards = get_all_trello_cards(
+ board_id=PERSONAL_TASKS_BOARD.id,
+ api_key=TRELLO_API_KEY,
+ api_token=TRELLO_API_TOKEN,
+ )
+ logger.info(f"Found {len(trello_cards)} cards on the board")
+
+ # Handle each issue
+ for issue in issues:
+ # Find the trello card id for this issue
+ trello_card_id = find_or_create_trello_issue_for(
+ trello_cards, issue, dry_run=args.dry_run
+ )
+ logger.info(f"GitLab Issue {issue.global_id} is Trello Card {trello_card_id}")
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/guru-vpn b/scripts/guru-vpn
new file mode 100755
index 0000000..9cffc38
--- /dev/null
+++ b/scripts/guru-vpn
@@ -0,0 +1,142 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import subprocess
+from typing import Optional
+
+
+def has_ykman() -> bool:
+ try:
+ subprocess.run(["ykman", "--version"], check=True, stdout=subprocess.DEVNULL)
+ return True
+ except subprocess.CalledProcessError:
+ return False
+
+
+def has_yk_plugged_in() -> bool:
+ devices = subprocess.run(["ykman", "list"], check=True, stdout=subprocess.PIPE)
+ devices = devices.stdout.decode("utf-8").split("\n")
+ return len(devices) > 1
+
+
+def is_interface_up(interface: str) -> bool:
+ try:
+ subprocess.run(
+ ["ip", "link", "show", interface], check=True, stdout=subprocess.DEVNULL
+ )
+ return True
+ except subprocess.CalledProcessError:
+ return False
+
+
+def get_oath_code(service: str) -> Optional[str]:
+ # Ask the YubiKey for the current TOTP code for the given OATH account.
+ # Returned as a string so leading zeros are preserved.
+ response = subprocess.run(
+ ["ykman", "oath", "accounts", "code", service],
+ check=True,
+ stdout=subprocess.PIPE,
+ )
+ output = response.stdout.decode("utf-8")
+ if not output:
+ return None
+ return output.split("\n")[0].split(" ")[-1]
+
+
+def get_password(label: str, ns: str, key: str) -> str:
+ # Try to find it
+ try:
+ result = subprocess.run(
+ ["secret-tool", "lookup", ns, key], check=True, stdout=subprocess.PIPE
+ )
+ return result.stdout.decode("utf-8")
+ except subprocess.CalledProcessError:
+ # If we are here, it doesn't exist
+ print(f"Enter your {label}")
+ subprocess.run(["secret-tool", "store", "--label", label, ns, key], check=True)
+ return get_password(label, ns, key)
+
+
+def handle_connect(args: argparse.Namespace) -> int:
+ if not has_yk_plugged_in():
+ print("Could not find YubiKey. Is it plugged in?", file=sys.stderr)
+ return 1
+
+ # If we are connected to AS54041, we need to briefly kill the connection
+ if args.wireguard_support and is_interface_up("vpn"):
+ print("Bringing down AS54041 VPN")
+ subprocess.run(["sudo", "wg-quick", "down", "vpn"], check=True)
+
+ # Get the base password
+ base_password = get_password("Guru VPN Password", "guru-vpn", "base-password")
+
+ # Fetch the credentials from the Yubikey
+ oath_code = get_oath_code("Guru")
+ print(f"Using OATH code: {oath_code}")
+ if oath_code is None or len(oath_code) != 6:
+ print("Invalid OATH code length. Try again in a minute.", file=sys.stderr)
+ return 1
+
+ # Construct the one-time password
+ password = f"{base_password}{oath_code}"
+
+ # Connect via nmcli
+ print("Bringing up Guru VPN")
+ subprocess.run(
+ [
+ "nmcli",
+ "connection",
+ "modify",
+ "Guru VPN",
+ "vpn.secrets",
+ f"password={password}",
+ ],
+ check=True,
+ )
+ subprocess.run(["nmcli", "connection", "up", "Guru VPN"], check=True)
+
+ # Bring AS54041 back up if we brought it down earlier
+ if args.wireguard_support:
+ print("Bringing up AS54041 VPN")
+ subprocess.run(["sudo", "wg-quick", "up", "vpn"], check=True)
+
+ return 0
+
+
+def handle_disconnect(args: argparse.Namespace) -> int:
+ # Disconnect from Guru VPN
+ print("Bringing down Guru VPN")
+ result = subprocess.run(["nmcli", "connection", "down", "Guru VPN"])
+ return result.returncode
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="guru-vpn", description="Utility for connecting to the Guru VPN"
+ )
+ ap.add_argument(
+ "operation",
+ choices=["connect", "disconnect", "reconnect"],
+ help="Operation to perform",
+ )
+ ap.add_argument(
+ "-w",
+ "--wireguard-support",
+ help="Handles wireguard interfaces",
+ action="store_true",
+ )
+ args = ap.parse_args()
+
+ # Ensure we can actually get credentials from the Yubikey
+ if not has_ykman():
+ print("Could not execute `ykman`. Is it installed?", file=sys.stderr)
+ return 1
+
+ # Handle subcommands
+ cmd_fns = {
+ "connect": handle_connect,
+ "disconnect": handle_disconnect,
+ "reconnect": lambda args: handle_disconnect(args) or handle_connect(args),
+ }
+ return cmd_fns[args.operation](args)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/houdini-tool b/scripts/houdini-tool
new file mode 100755
index 0000000..e0d61a1
--- /dev/null
+++ b/scripts/houdini-tool
@@ -0,0 +1,137 @@
+#! /usr/bin/env python3
+
+# fmt:off
+import sys
+import os
+from pathlib import Path
+sys.path.append((Path(os.environ["EWCONFIG_ROOT"]) / "python_modules").as_posix())
+# fmt:on
+
+import argparse
+import subprocess
+import logging
+from ewpipe.common.dirs import HOUDINI_PROJECTS_DIR
+from ewpipe.common.utils.path import prepend_if_relative
+from ewpipe.houdini.editions import (
+ get_binary_name_for_edition,
+ get_houdini_edition_args,
+ HOU_EDITIONS,
+ noncomercialize_path,
+)
+from ewpipe.houdini.installations import get_houdini_installation_path
+from ewpipe.common.logging import configure_logging
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="houdini-tool",
+ description="Evan's tool for launching and managing Houdini",
+ )
+ ap.add_argument(
+ "--type",
+ "-t",
+ help="Houdini type",
+ choices=HOU_EDITIONS,
+ default="apprentice",
+ )
+ ap.add_argument(
+ "--project",
+ "-p",
+ help="Name of the project to open or create. May also be a direct path",
+ type=str,
+ required=True,
+ )
+ ap.add_argument(
+ "--sub-project",
+ "--sp",
+ help="Name of the sub-project to open",
+ type=str,
+ default=None,
+ )
+ ap.add_argument(
+ "--hou-version",
+ help="Houdini version to use. Defaults to latest",
+ type=str,
+ default=None,
+ )
+ ap.add_argument(
+ "--no-project-env", help="Disables setting $HIP and $JOB", action="store_true"
+ )
+ ap.add_argument("--cpu", help="Use CPU compute for OpenCL", action="store_true")
+ ap.add_argument(
+ "--dump-core", help="Forces Houdini to dump its core", action="store_true"
+ )
+ ap.add_argument("--verbose", "-v", help="Verbose output", action="store_true")
+ args = ap.parse_args()
+
+ # Set up verbose logging if requested
+ configure_logging(verbose=args.verbose)
+
+ # Get the houdini path
+ hou_path = get_houdini_installation_path(version=args.hou_version)
+ if not hou_path:
+ logger.error("Could not find Houdini installation")
+ return 1
+ logger.info(f"Selected Houdini {hou_path.name[3:]} from {hou_path}")
+
+ # Determine the project path
+ project_path = prepend_if_relative(HOUDINI_PROJECTS_DIR, Path(args.project))
+ project_save_file = project_path / (
+ f"{args.sub_project}.hip" if args.sub_project else f"{project_path.name}.hip"
+ )
+ logger.info(f"Opening project from: {project_path}")
+
+ # If the directory does not exist, create
+ project_path.mkdir(parents=True, exist_ok=True)
+ (project_path / "render").mkdir(parents=True, exist_ok=True)
+
+ # If allowed, set up env vars
+ hou_env_settings = {}
+ hou_env_settings["HOUDINI_SCRIPT_DEBUG"] = "1"
+ hou_env_settings["HOUDINI_SPLASH_MESSAGE"] = "Loading with custom scripts"
+ hou_env_settings["HOUDINI_CONSOLE_PYTHON_PANEL_ERROR"] = "1"
+ hou_env_settings["HOUDINI_PDG_NODE_DEBUG"] = "3"
+ if args.cpu:
+ hou_env_settings["HOUDINI_OCL_DEVICETYPE"] = "CPU"
+ hou_env_settings["HOUDINI_USE_HFS_OCL"] = "1"
+ if args.dump_core:
+ hou_env_settings["HOUDINI_COREDUMP"] = "1"
+ if not args.no_project_env:
+ # environment_vars["HIP"] = str(project_path)
+ hou_env_settings["JOB"] = str(project_path)
+ hou_env_settings["HOUDINI_HIP_DEFAULT_NAME"] = project_save_file.name
+
+ # Figure out what has changed in the environment and print the changes
+ if hou_env_settings:
+ logger.info("Environment changes:")
+ for key, value in hou_env_settings.items():
+ logger.info(f" ${key}: {value}")
+
+ # Combine the current environment with the Houdini overrides
+ cmd_env = dict(os.environ)
+ cmd_env.update(hou_env_settings)
+
+ # Build command to launch houdini
+ cmd = [
+ str(hou_path / "bin" / get_binary_name_for_edition(args.type)),
+ "-foreground",
+ ] + get_houdini_edition_args(args.type)
+
+ # If the expected project file exists already
+ # (aka, user already saved in a previous session),
+ # then conveniently open the project automatically
+ proj_file = noncomercialize_path(project_save_file)
+ if proj_file.exists():
+ cmd.append(str(proj_file))
+
+ # Run houdini
+ logger.info(f"Running: {' '.join(cmd)}")
+ status = subprocess.run(cmd, env=cmd_env, cwd=project_path).returncode
+ return status
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/hython-latest b/scripts/hython-latest
new file mode 100755
index 0000000..205e7ca
--- /dev/null
+++ b/scripts/hython-latest
@@ -0,0 +1,9 @@
+#! /bin/bash
+set -e
+
+# Find hython
+HOUDINI_PATH=$(python3 ~/.config/ewconfig/python_modules/ewpipe/houdini/installations.py)
+HYTHON_PATH="$HOUDINI_PATH/bin/hython"
+
+# Execute hython, passing through all arguments
+"$HYTHON_PATH" "$@"
\ No newline at end of file
diff --git a/scripts/ifpi b/scripts/ifpi
new file mode 100755
index 0000000..c9444d5
--- /dev/null
+++ b/scripts/ifpi
@@ -0,0 +1,172 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import os
+import logging
+import subprocess
+import ipaddress
+import re
+
+logger = logging.getLogger(__name__)
+
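+# Maps destination IPs to their reverse-DNS hostnames (None when the lookup failed) so nslookup only runs once per address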
+HOSTNAME_CACHE = {}
+
+HOSTNAME_PATTERNS = {
+ "GitHub": [re.compile(r".*github\.com\.")],
+ "Google": [re.compile(r".*google\.com\."), re.compile(r".*1e100\.net\.")],
+ "Google Cloud": [re.compile(r".*googleusercontent\.com\.")],
+ "Amazon Web Services": [re.compile(r".*amazonaws\.com\.")],
+ "Cloudfront": [re.compile(r".*cloudfront\.net\.")],
+ "Evan's Infrastructure": [re.compile(r".*ewp\.fyi\.")],
+}
+
+
+def classify_traffic(
+ destination_addr: ipaddress.IPv4Address, destination_port: int, packet_proto: str
+) -> str:
+ packet_proto = packet_proto.upper().replace(",", "")
+
+ # Handle some easy cases
+ if destination_addr.is_multicast:
+ return "multicast"
+ if destination_port == 22 and packet_proto == "TCP":
+ return "SSH"
+ if destination_port == 53 and packet_proto == "UDP":
+ return "DNS"
+
+ # Use nslookup to get the hostname
+ if destination_addr not in HOSTNAME_CACHE:
+ try:
+ hostname = (
+ subprocess.check_output(
+ ["nslookup", str(destination_addr)], stderr=subprocess.DEVNULL
+ )
+ .decode("utf-8")
+ .split("\n")[0]
+ .split(" ")[-1]
+ )
+ HOSTNAME_CACHE[destination_addr] = hostname
+ except subprocess.CalledProcessError:
+ HOSTNAME_CACHE[destination_addr] = None
+
+ # Get the hostname
+ hostname = HOSTNAME_CACHE[destination_addr] or destination_addr
+
+ # If this is HTTP/HTTPS traffic, try to figure out the service
+ if (packet_proto == "TCP" and destination_port in [80, 443]) or (
+ packet_proto == "UDP" and destination_port == 443
+ ):
+ for service, patterns in HOSTNAME_PATTERNS.items():
+ for pattern in patterns:
+ if pattern.match(str(hostname)):
+ return service
+
+ # Fallbacks in case we can't figure anything else out
+ if packet_proto == "TCP" and destination_port == 443:
+ return f"HTTPS ({hostname})"
+ if packet_proto == "TCP" and destination_port == 80:
+ return f"HTTP ({hostname})"
+ if packet_proto == "TCP" and destination_port == 443:
+ return f"QUIC ({hostname})"
+
+ return f"Unknown({packet_proto}, {destination_port}, {hostname})"
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog="ifpi", description="Interface Packet Inspector")
+ ap.add_argument("interface", help="Interface to listen on")
+ ap.add_argument(
+ "--local-subnet",
+ "-l",
+ help="Subnet(s) to consider local",
+ action="append",
+ )
+ ap.add_argument(
+ "--ignore-ssh",
+ help="Ignore SSH traffic",
+ action="store_true",
+ )
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # If we are not root, re-launch ourselves with sudo
+ if not os.geteuid() == 0:
+ return subprocess.call(["sudo"] + sys.argv)
+
+ # Convert the local subnets to IPNetwork objects
+ local_subnets = []
+ if args.local_subnet:
+ for subnet in args.local_subnet:
+ local_subnets.append(ipaddress.ip_network(subnet))
+
+ # Launch tcpdump
+ tcpdump_args = [
+ "tcpdump",
+ "-i",
+ args.interface,
+ "-nn",
+ "-tt",
+ "-q",
+ ]
+ process = subprocess.Popen(
+ tcpdump_args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ )
+
+ # Read all lines as they are printed
+ for line in process.stdout:
+ line = line.decode("utf-8").strip()
+
+ # The format is (time, proto, data)
+ timestamp, protocol, data = line.split(" ", 2)
+
+ # We will only handle IP packets
+ if protocol not in ["IP", "IP6"]:
+ continue
+
+ # Extract source and destination IPs, along with the metadata
+ routing, metadata = data.split(": ", 1)
+ source, destination = routing.split(" > ")
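+ # tcpdump prints endpoints as 'address.port'; peel the port off the last dot-separated field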
+ source_port, destination_port = (
+ source.split(".")[-1],
+ destination.split(".")[-1],
+ )
+ try:
+ source = ipaddress.ip_address(".".join(source.split(".")[:-1]))
+ destination = ipaddress.ip_address(".".join(destination.split(".")[:-1]))
+ except ValueError:
+ continue
+
+ # Only pay attention to source addrs that are local
+ for subnet in local_subnets:
+ if source in subnet:
+ break
+ else:
+ continue
+
+ # Classify the traffic
+ classification = classify_traffic(
+ destination, int(destination_port), metadata.lstrip().split(" ")[0]
+ )
+
+ # Handle ignoring SSH traffic
+ if args.ignore_ssh and classification == "SSH":
+ continue
+
+ print(f"{source}\t{classification}")
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/install-nvim-appimage b/scripts/install-nvim-appimage
new file mode 100755
index 0000000..564536d
--- /dev/null
+++ b/scripts/install-nvim-appimage
@@ -0,0 +1,9 @@
+#! /bin/sh
+set -ex
+
+# Download the appimage to .local/bin
+mkdir -p ~/.local/bin
+wget https://github.com/neovim/neovim/releases/download/stable/nvim.appimage -O ~/.local/bin/nvim
+
+# Make it executable
+chmod +x ~/.local/bin/nvim
diff --git a/scripts/install-nvim-from-source b/scripts/install-nvim-from-source
new file mode 100755
index 0000000..8ede3be
--- /dev/null
+++ b/scripts/install-nvim-from-source
@@ -0,0 +1,39 @@
+#! /usr/bin/env bash
+set -e
+
+echo "You have chosen to isntall neovim from source."
+
+# If ~/src/neovim doesn't exist, clone a fresh copy
+cd ~/src
+if [ ! -d ~/src/neovim ]; then
+ git clone https://github.com/neovim/neovim
+fi
+cd neovim
+
+# Handle branch checkout
+echo "Do you want to switch to the stable branch? (y/n)"
+read -r -n 1 response
+if [[ $response =~ ^([yY][eE][sS]|[yY])$ ]]; then
+ git checkout stable
+fi
+
+# Figure out the appropriate make command.
+if [ -x "$(command -v gmake)" ]; then
+ MAKE_CMD=gmake
+else
+ MAKE_CMD=make
+fi
+
+# Determine the install prefix
+NVIM_INSTALL_PREFIX=${NVIM_INSTALL_PREFIX:-$HOME/.local}
+
+# Build
+echo "Building neovim..."
+$MAKE_CMD CMAKE_BUILD_TYPE=Release CMAKE_EXTRA_FLAGS="-DCMAKE_INSTALL_PREFIX=$NVIM_INSTALL_PREFIX"
+
+# Install
+echo "Would you like to install neovim? (y/n)"
+read -r -n 1 response
+if [[ $response =~ ^([yY][eE][sS]|[yY])$ ]]; then
+ $MAKE_CMD install
+fi
diff --git a/scripts/kxchat b/scripts/kxchat
new file mode 100755
index 0000000..7508af1
--- /dev/null
+++ b/scripts/kxchat
@@ -0,0 +1,71 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import serial
+import subprocess
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="kxchat", description="Chat over CW using the keyboard"
+ )
+ ap.add_argument("--tx-only", help="Only transmit, do not receive", action="store_true")
+ ap.add_argument("--device", "-d", help="Serial device", default="/dev/ttyUSB0")
+ ap.add_argument("--baud", "-b", help="Serial baud rate", default=38400, type=int)
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ stream=sys.stderr,
+ )
+
+ # Spawn a kxlisten process and hook its STDOUT to our STDOUT
+ if not args.tx_only:
+ logger.debug(f"Starting kxlisten process")
+ kxlisten = subprocess.Popen(
+ ["kxlisten", "-d", args.device, "-b", str(args.baud)],
+ )
+
+ # Read lines from keyboard, and send them to the radio using kxsend
+ try:
+ while True:
+ # Read a line from the keyboard
+ line = sys.stdin.readline().strip()
+
+ # If the line is empty, skip
+ if not line:
+ continue
+
+ # Add a space onto the end of the line
+ line += " "
+
+ # Send the line to the radio
+ logger.debug(f"Sending line: {line}")
+ kxsend = subprocess.Popen(
+ ["kxsend", "-d", args.device, "-b", str(args.baud), line],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+
+ except KeyboardInterrupt:
+ logger.info("Done transmitting")
+
+ # Stop the kxlisten process
+ if not args.tx_only:
+ logger.debug(f"Stopping kxlisten process")
+ kxlisten.terminate()
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/kxcq b/scripts/kxcq
new file mode 100755
index 0000000..837599c
--- /dev/null
+++ b/scripts/kxcq
@@ -0,0 +1,4 @@
+#! /bin/bash
+set -ex
+
+kxsend "CQ CQ CQ DE VA3ZZA VA3ZZA CQ CQ K" -v
diff --git a/scripts/kxfilter b/scripts/kxfilter
new file mode 100755
index 0000000..3546487
--- /dev/null
+++ b/scripts/kxfilter
@@ -0,0 +1,144 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import serial
+
+logger = logging.getLogger(__name__)
+
+
+def set_apf(s: serial.Serial, state: str) -> None:
+ if state == "on":
+ s.write(b"AP1;")
+ elif state == "off":
+ s.write(b"AP0;")
+
+
+def set_nb(s: serial.Serial, state: str) -> None:
+ if state == "on":
+ s.write(b"NB1;")
+ elif state == "off":
+ s.write(b"NB0;")
+ else:
+ s.write("NL{:0>2}00;".format(state).encode("ascii"))
+ s.write(b"NB1;")
+
+
+def set_preamp(s: serial.Serial, state: str) -> None:
+ if state == "on":
+ s.write(b"PA1;")
+ elif state == "off":
+ s.write(b"PA0;")
+
+
+def set_attenuator(s: serial.Serial, state: str) -> None:
+ if state == "on":
+ s.write(b"RA01;")
+ elif state == "off":
+ s.write(b"RA00;")
+
+
+def set_filter_bandwidth(s: serial.Serial, bandwidth: float) -> None:
+ s.write("BW{:0>4};".format(int(bandwidth * 100)).encode("ascii"))
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="kxfilter", description="Change filter settings on a KX2 or KX3"
+ )
+ ap.add_argument(
+ "--audio-peaking-filter",
+ "--apf",
+ help="Control the Audio Peaking Filter",
+ choices=["on", "off"],
+ )
+ ap.add_argument(
+ "--noise-blanker",
+ "--nb",
+ help="Control the noise blanker",
+ choices=["on", "off"] + [str(x) for x in range(0, 16)],
+ )
+ ap.add_argument(
+ "--pre-amp", "--pa", help="Control the pre-amp", choices=["on", "off"]
+ )
+ ap.add_argument(
+ "--attenuator", "--att", help="Control the attenuator", choices=["on", "off"]
+ )
+ ap.add_argument(
+ "--filter-bandwidth",
+ "--bw",
+ help="Filter bandwidth",
+ type=float,
+ choices=[
+ 0.05,
+ 0.10,
+ 0.15,
+ 0.20,
+ 0.25,
+ 0.30,
+ 0.35,
+ 0.40,
+ 0.45,
+ 0.5,
+ 0.6,
+ 0.7,
+ 0.8,
+ 0.9,
+ 1.0,
+ 1.2,
+ 1.4,
+ 1.6,
+ 2.0,
+ 2.4,
+ 2.8,
+ ],
+ )
+ ap.add_argument("--device", "-d", help="Serial device", default="/dev/ttyUSB0")
+ ap.add_argument("--baud", "-b", help="Serial baud rate", default=38400, type=int)
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ stream=sys.stderr,
+ )
+
+ # Connect to the radio
+ logger.debug(f"Connecting to radio: {args.device}")
+ serial_conn = serial.Serial(args.device, args.baud)
+
+ # Handle APF
+ if args.audio_peaking_filter:
+ logger.info(f"Setting APF: {args.audio_peaking_filter}")
+ set_apf(serial_conn, args.audio_peaking_filter)
+
+ # Handle NB
+ if args.noise_blanker:
+ logger.info(f"Setting Noise Blanker: {args.noise_blanker}")
+ set_nb(serial_conn, args.noise_blanker)
+
+ # Handle PA
+ if args.pre_amp:
+ logger.info(f"Setting Pre-Amp: {args.pre_amp}")
+ set_preamp(serial_conn, args.pre_amp)
+
+ # Handle RX ATT
+ if args.attenuator:
+ logger.info(f"Setting RX Attenuator: {args.attenuator}")
+ set_attenuator(serial_conn, args.attenuator)
+
+ # Handle filter bandwidth
+ if args.filter_bandwidth:
+ logger.info(f"Setting Filter Bandwidth: {args.filter_bandwidth}")
+ set_filter_bandwidth(serial_conn, args.filter_bandwidth)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/kxlisten b/scripts/kxlisten
new file mode 100755
index 0000000..a2b688e
--- /dev/null
+++ b/scripts/kxlisten
@@ -0,0 +1,62 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import serial
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="kxlisten", description="Route inbound CW text to STDOUT"
+ )
+ ap.add_argument("--stop", help="Stop listening", action="store_true")
+ ap.add_argument("--device", "-d", help="Serial device", default="/dev/ttyUSB0")
+ ap.add_argument("--baud", "-b", help="Serial baud rate", default=38400, type=int)
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ stream=sys.stderr,
+ )
+
+ # Connect to the radio
+ logger.debug(f"Connecting to radio: {args.device}")
+ serial_conn = serial.Serial(args.device, args.baud)
+
+ # Handle stopping
+ if args.stop:
+ cmd = "TT0;"
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+ return 0
+
+ # Otherwise, start listening
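+ # TT1; asks the radio to stream decoded text to the serial port; TT0; turns it back off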
+ cmd = "TT1;"
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ # Listen for text
+ try:
+ while True:
+ next_char = serial_conn.read().decode("ascii")
+ print(next_char, end="")
+ sys.stdout.flush()
+ except KeyboardInterrupt:
+ logger.info("Stopping")
+ cmd = "TT0;"
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/kxpoweroff b/scripts/kxpoweroff
new file mode 100755
index 0000000..23dec14
--- /dev/null
+++ b/scripts/kxpoweroff
@@ -0,0 +1,42 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import serial
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="kxpoweroff", description="Turn off a KX2 or KX3"
+ )
+ ap.add_argument("--device", "-d", help="Serial device", default="/dev/ttyUSB0")
+ ap.add_argument("--baud", "-b", help="Serial baud rate", default=38400, type=int)
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Connect to the radio
+ logger.debug(f"Connecting to radio: {args.device}")
+ serial_conn = serial.Serial(args.device, args.baud)
+
+ # Send the appropriate command
+ cmd = "PS0;"
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/kxpwr b/scripts/kxpwr
new file mode 100755
index 0000000..47f536d
--- /dev/null
+++ b/scripts/kxpwr
@@ -0,0 +1,51 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import serial
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="kxpwr", description="Set the TX power on a KX2"
+ )
+ ap.add_argument("watts", help="Power level in watts", type=int)
+ ap.add_argument("--retune", help="Re-tunes the antenna after setting power", action="store_true")
+ ap.add_argument("--device", "-d", help="Serial device", default="/dev/ttyUSB0")
+ ap.add_argument("--baud", "-b", help="Serial baud rate", default=38400, type=int)
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Connect to the radio
+ logger.debug(f"Connecting to radio: {args.device}")
+ serial_conn = serial.Serial(args.device, args.baud)
+
+ # Set the power level
+ cmd = "PC{:0>3};".format(args.watts)
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ # Re-tune the antenna if needed
+ if args.retune:
+ cmd = "SWT20;"
+ logger.warning("Re-tuning antenna")
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/kxqrl b/scripts/kxqrl
new file mode 100755
index 0000000..1a068f6
--- /dev/null
+++ b/scripts/kxqrl
@@ -0,0 +1,4 @@
+#! /bin/bash
+set -ex
+
+kxsend "QRL?" -v
diff --git a/scripts/kxsend b/scripts/kxsend
new file mode 100755
index 0000000..a749a92
--- /dev/null
+++ b/scripts/kxsend
@@ -0,0 +1,56 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import serial
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="kxsend", description="Send a string over CW on a KX2"
+ )
+ ap.add_argument("text", help="Text to send", nargs="+")
+ ap.add_argument("--speed", help="Transmit speed in WPM", type=int)
+ ap.add_argument("--device", "-d", help="Serial device", default="/dev/ttyUSB0")
+ ap.add_argument("--baud", "-b", help="Serial baud rate", default=38400, type=int)
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+ args.text = " ".join(args.text)
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Connect to the radio
+ logger.debug(f"Connecting to radio: {args.device}")
+ serial_conn = serial.Serial(args.device, args.baud)
+
+ # If we have a specific speed, set it
+ if args.speed:
+ cmd = "KS{:0>3};".format(args.speed)
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ # Break the text into max 24 character chunks
+ chunks = [args.text[i:i+24] for i in range(0, len(args.text), 24)]
+ logger.info(f"Sending {len(chunks)} chunks")
+
+ # Handle each chunk
+ for chunk in chunks:
+ # Send the text
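+ # KY queues text for CW transmission; the radio only accepts a short string per command, hence the 24-character chunks above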
+ cmd = f"KY {chunk};"
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/kxtune b/scripts/kxtune
new file mode 100755
index 0000000..b5ef0b1
--- /dev/null
+++ b/scripts/kxtune
@@ -0,0 +1,67 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import serial
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="kxtune", description="Tune a KX2 or KX3 to a new frequency"
+ )
+ ap.add_argument("frequency", help="Frequency to tune to in KC", type=float)
+ ap.add_argument("--vfo", help="VFO to tune", choices=["a", "b"], default="a")
+ ap.add_argument(
+ "--mode",
+ help="Radio mode",
+ choices=["lsb", "usb", "cw", "fm", "am", "data", "cw-r", "data-r"],
+ )
+ ap.add_argument("--device", "-d", help="Serial device", default="/dev/ttyUSB0")
+ ap.add_argument("--baud", "-b", help="Serial baud rate", default=38400, type=int)
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Convert to Hz
+ frequency = int(args.frequency * 1000)
+
+ # Connect to the radio
+ logger.debug(f"Connecting to radio: {args.device}")
+ serial_conn = serial.Serial(args.device, args.baud)
+
+ # Send the tune command
+ cmd = f"F{args.vfo.upper()}{frequency:011d};"
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ # If we have a mode, set it
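+ # Map the CLI mode name onto the radio's MD command code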
+ mode_id = {
+ "lsb": "1",
+ "usb": "2",
+ "cw": "3",
+ "fm": "4",
+ "am": "5",
+ "data": "6",
+ "cw-r": "7",
+ "data-r": "9",
+ }.get(args.mode)
+ if mode_id:
+ cmd = f"MD{mode_id};"
+ logger.debug(f"Sending command: {cmd}")
+ serial_conn.write(cmd.encode("ascii"))
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/mc-log-cat b/scripts/mc-log-cat
new file mode 100755
index 0000000..288a2c9
--- /dev/null
+++ b/scripts/mc-log-cat
@@ -0,0 +1,34 @@
+#! /usr/bin/env python3
+
+import sys
+import argparse
+import subprocess
+from pathlib import Path
+
+def main() -> int:
+ ap = argparse.ArgumentParser(prog="mc-log-cat", description="Analyze a set of Minecraft logs")
+ ap.add_argument("logs_dir", help="Path to the logs directory", type=Path)
+ args = ap.parse_args()
+
+ # Find all compressed (old) logs
+ compressed_logs = list(args.logs_dir.glob("*.log.gz"))
+ compressed_logs.sort()
+
+ # Non-destructively read the contents of each archive
+ log_files = []
+ for file in compressed_logs:
+ file_contents = subprocess.run(["gunzip", "-c", file.as_posix()], capture_output=True, text=True)
+ log_files.append(file_contents.stdout)
+
+ # Read and append the most recent log
+ if args.logs_dir.joinpath("latest.log").exists():
+ log_files.append(args.logs_dir.joinpath("latest.log").read_text())
+
+ # Print the logs
+ for log in log_files:
+ print(log)
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/memegen b/scripts/memegen
new file mode 100755
index 0000000..ae77f67
--- /dev/null
+++ b/scripts/memegen
@@ -0,0 +1,226 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import logging
+import json
+import subprocess
+from PIL import Image, ImageDraw, ImageFont
+from typing import List, Dict, Any, Optional
+from pathlib import Path
+from dataclasses import dataclass
+from enum import Enum
+from datetime import datetime
+
+logger = logging.getLogger(__name__)
+CONFIG_DIR = Path("~/.config/memegen").expanduser()
+DEFAULT_OUTPUT_DIR = Path("~/Pictures/memes").expanduser()
+
+
+@dataclass
+class MemeTemplate:
+ image_path: Path
+ config: Dict[str, Any]
+
+
+class HorizontalAlignment(Enum):
+ LEFT = "left"
+ CENTER = "center"
+ RIGHT = "right"
+
+
+class VerticalAlignment(Enum):
+ TOP = "top"
+ CENTER = "center"
+ BOTTOM = "bottom"
+
+
+def discover_templates() -> List[str]:
+ # Find all directories in the templates directory
+ return [p.name for p in (CONFIG_DIR / "templates").glob("*") if p.is_dir()]
+
+
+def load_template(name: str) -> MemeTemplate:
+ logger.info(f"Loading template: {name}")
+
+ # Find the template directory
+ template_dir = CONFIG_DIR / "templates" / name
+ if not template_dir.exists():
+ logger.error(f"Template {name} does not exist")
+ sys.exit(1)
+
+ return MemeTemplate(
+ image_path=template_dir / "template.png",
+ config=json.loads((template_dir / "config.json").read_text()),
+ )
+
+
+def calc_width_from_image(width_str: str, image_width: int) -> int:
+ if width_str.endswith("%"):
+ return int(image_width * int(width_str[:-1]) / 100)
+ else:
+ return int(width_str)
+
+
+def render_text_on_image(image: Image, text: str, zone: str, config: Dict[str, Any]):
+ # NOTE: This must handle text with newlines
+ # Get the zone config
+ zone_config = config["zones"][zone]
+ horizontal_alignment = HorizontalAlignment(zone_config["horizontal_align"])
+ vertical_alignment = VerticalAlignment(zone_config["vertical_align"])
+ text_width = calc_width_from_image(zone_config["width"], image.width)
+ max_line_height = zone_config["max_line_height"]
+ font_path = CONFIG_DIR / "fonts" / config["font"]
+
+ # Create the font
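+ # Grow the font size until the text would overflow the zone's width or max line height, then back off by one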
+ font = None
+ font_size = 1
+ while True:
+ font = ImageFont.truetype(str(font_path), font_size)
+
+ # Split the text into lines
+ lines = text.splitlines()
+ bounding_boxes = []
+ for line in lines:
+ bounding_boxes.append(font.getbbox(line))
+
+ # Calculate the height of the text
+ line_height = max([bbox[3] for bbox in bounding_boxes])
+ total_height = sum(
+ [bbox[3] + zone_config["line_spacing"] for bbox in bounding_boxes]
+ )
+ max_width = max([bbox[2] for bbox in bounding_boxes])
+
+ # If we have a max line height, ensure we don't exceed it
+ if max_line_height and line_height > max_line_height:
+ # Back off by one size and rebuild the font so the rendered text actually fits
+ font_size -= 1
+ font = ImageFont.truetype(str(font_path), font_size)
+ break
+
+ # Don't exceed the width
+ if max_width > text_width:
+ font_size -= 1
+ font = ImageFont.truetype(str(font_path), font_size)
+ break
+
+ # Increment the font size
+ font_size += 1
+
+ # Determine the starting Y position
+ y = zone_config["vertical_offset"]
+ if vertical_alignment == VerticalAlignment.CENTER:
+ y += (image.height - total_height) / 2
+ elif vertical_alignment == VerticalAlignment.BOTTOM:
+ y += image.height - total_height
+
+ # Render each line onto the image
+ draw = ImageDraw.Draw(image)
+ for line in text.splitlines():
+ # Calculate the x position
+ if horizontal_alignment == HorizontalAlignment.LEFT:
+ x = zone_config["horizontal_offset"]
+ elif horizontal_alignment == HorizontalAlignment.CENTER:
+ x = ((image.width - font.getbbox(line)[2]) / 2) + zone_config[
+ "horizontal_offset"
+ ]
+ elif horizontal_alignment == HorizontalAlignment.RIGHT:
+ x = (image.width - font.getbbox(line)[2]) + zone_config["horizontal_offset"]
+ else:
+ raise ValueError(f"Invalid horizontal alignment: {horizontal_alignment}")
+
+ # Render the text
+ draw.text(
+ (x, y),
+ line,
+ fill=tuple(config["fill_color"]),
+ stroke_fill=tuple(config["stroke_color"]),
+ stroke_width=config["stroke_width"],
+ font=font,
+ )
+
+ # Increment the y position
+ y += line_height + zone_config["line_spacing"]
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog="memegen", description="Generates memes")
+ ap.add_argument(
+ "template", help="The template to use", choices=discover_templates()
+ )
+ ap.add_argument("--top-text", help="Top text (if applicable)")
+ ap.add_argument("--bottom-text", help="Bottom text (if applicable)")
+ ap.add_argument(
+ "--keep-case", help="Keep the case of the text", action="store_true"
+ )
+ ap.add_argument("--output", "-o", help="Output file path")
+ ap.add_argument("--no-show", help="Don't show the image after creation", action="store_true")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Load the template
+ template = load_template(args.template)
+ template_supports_top_text = "top" in template.config["zones"]
+ template_supports_bottom_text = "bottom" in template.config["zones"]
+
+ # Ensure we have text
+ if args.top_text and not template_supports_top_text:
+ logger.error(f"Template {args.template} does not support top text")
+ sys.exit(1)
+ if args.bottom_text and not template_supports_bottom_text:
+ logger.error(f"Template {args.template} does not support bottom text")
+ sys.exit(1)
+ if not args.top_text and not args.bottom_text:
+ logger.error("No text provided")
+ if not all([template_supports_top_text, template_supports_bottom_text]):
+ required_text = "top" if template_supports_top_text else "bottom"
+ logger.error(
+ f"Template {args.template} requires the --{required_text}-text argument"
+ )
+ sys.exit(1)
+
+ # Transform the text
+ # fmt:off
+ top_text = args.top_text.upper() if args.top_text and (not args.keep_case) else args.top_text
+ bottom_text = args.bottom_text.upper() if args.bottom_text and (not args.keep_case) else args.bottom_text
+ top_text = top_text.replace("\\n", "\n").replace("\\N", "\n") if top_text else None
+ bottom_text = bottom_text.replace("\\n", "\n").replace("\\N", "\n") if bottom_text else None
+ # fmt: on
+
+ # Load the image
+ image = Image.open(template.image_path)
+
+ # Render the text
+ if top_text:
+ render_text_on_image(image, top_text, "top", template.config)
+ if bottom_text:
+ render_text_on_image(image, bottom_text, "bottom", template.config)
+
+ # Build the output path
+ output_path = (
+ Path(args.output)
+ if args.output
+ else (
+ DEFAULT_OUTPUT_DIR
+ / f"meme-{datetime.now().strftime('%Y-%m-%d-%H-%M-%S')}.{args.template}.png"
+ )
+ )
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+
+ # Save the image
+ image.save(output_path)
+
+ # Show the image
+ if not args.no_show:
+ subprocess.run(["xdg-open", str(output_path)])
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/pwnagotchi-watch b/scripts/pwnagotchi-watch
new file mode 100755
index 0000000..2932e54
--- /dev/null
+++ b/scripts/pwnagotchi-watch
@@ -0,0 +1,84 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import requests
+import pygame
+import time
+import subprocess
+
+logger = logging.getLogger(__name__)
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog='pwnagotchi-watch', description='Watch the pwnagotchi screen remotely')
+ ap.add_argument("--host", help="Pwnagotchi hostname or IP address", default="10.0.0.2")
+ ap.add_argument("--port", help="Pwnagotchi port", type=int, default=8080)
+ ap.add_argument("-u", "--username", help="Pwnagotchi username", default="changeme")
+ ap.add_argument("-p", "--password", help="Pwnagotchi password", default="changeme")
+ ap.add_argument("--refresh-rate", help="Refresh rate in seconds", type=int, default=2)
+ ap.add_argument("--pin", help="Pin the window on top", action="store_true")
+ ap.add_argument('-v', '--verbose', help='Enable verbose logging', action='store_true')
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format='%(levelname)s: %(message)s',
+ )
+
+ # Create a window to draw the image to
+ pygame.init()
+ screen = pygame.display.set_mode((250,122))
+ pygame.display.set_caption(f"pwnagotchi ({args.host})")
+
+ # Pin the window on top
+ if args.pin:
+ logger.info("Pinning the window on top")
+ window_id = pygame.display.get_wm_info()["window"]
+ subprocess.run(["wmctrl", "-i", "-r", str(window_id), "-b", "add,above"], check=True)
+
+ # Continuously fetch the pwnagotchi screen
+ while True:
+
+ # If the window is closed, exit
+ for event in pygame.event.get():
+ if event.type == pygame.QUIT:
+ return 0
+
+ # Attempt to fetch the pwnagotchi screen
+ try:
+ # Fetch the pwnagotchi screen
+ logger.debug(f"Fetching pwnagotchi screen from {args.host}:{args.port}")
+ try:
+ response = requests.get(f"http://{args.host}:{args.port}/ui", auth=(args.username, args.password), timeout=1)
+ response.raise_for_status()
+ except Exception as e:
+ logger.error(f"Failed to connect to pwnagotchi at {args.host}:{args.port}")
+ logger.error(f"Error: {e}")
+ time.sleep(args.refresh_rate)
+ continue
+ screen_data = response.content
+
+ # Draw the screen to the window
+ logger.debug("Drawing pwnagotchi screen")
+ with open("/tmp/pwnscreen.png", "wb") as f:
+ f.write(screen_data)
+ screen.fill((0,0,0))
+ try:
+ img = pygame.image.load("/tmp/pwnscreen.png")
+ screen.blit(img, (0,0))
+ pygame.display.flip()
+ except pygame.error as e:
+ logger.error(f"Failed to load pwnagotchi screen: {e}")
+
+ except requests.RequestException as e:
+ logger.error(f"Failed to fetch pwnagotchi screen: {e}")
+
+ time.sleep(args.refresh_rate)
+
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/pytb b/scripts/pytb
new file mode 100755
index 0000000..fe0bddb
--- /dev/null
+++ b/scripts/pytb
@@ -0,0 +1,134 @@
+#! /usr/bin/env python
+import argparse
+import sys
+import logging
+import re
+from pathlib import Path
+from rich.console import Console
+from rich.syntax import Syntax
+from datetime import datetime
+
+logger = logging.getLogger(__name__)
+
+LINE_NUMBER_RE = re.compile(r", line \d+,")
+OUTPUT_ROOT = Path("~/Pictures/pytb").expanduser()
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="pytb", description="Tool for analyzing Python back traces"
+ )
+ ap.add_argument("--file", "-f", help="Read from file instead of stdin", type=Path)
+ ap.add_argument(
+ "--no-strip-referer", help="Strip referer from flask tbs", action="store_true"
+ )
+ ap.add_argument("--trace-only", help="Only print the trace", action="store_true")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Attempt to read from file
+ if args.file:
+ tb_lines = args.file.read_text().splitlines()
+ else:
+ # Check if the shell is interactive
+ if sys.stdin.isatty():
+ print("Please paste the backtrace and press Ctrl+D:")
+
+ # Read from stdin until EOF
+ try:
+ tb_lines = "".join(list(sys.stdin)).splitlines()
+ except KeyboardInterrupt:
+ print("\nKeyboard interrupt detected, exiting...")
+ return 1
+
+ # Seek to the first line of the backtrace
+ for start_idx, line in enumerate(tb_lines):
+ if line.startswith("Traceback"):
+ break
+ else:
+ logger.error("No traceback found")
+ return 1
+
+ # Group the traceback lines into frames
+ frames = []
+ is_in_frame = False
+ for line in tb_lines[start_idx:]:
+ if line.lstrip().startswith("File "):
+ is_in_frame = True
+ frames.append([line])
+ elif is_in_frame:
+ frames[-1].append(line)
+
+ # Handle the frames
+ output_lines = []
+ for frame in frames:
+ # Figure out the file
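+ # Frame headers look like:  File "/path/to/file.py", line 123, in func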
+ file = Path(frame[0].split('"')[1])
+ line_num = int(LINE_NUMBER_RE.search(frame[0]).group(0)[6:-1])
+
+ # Print the actual code
+ for idx, statement in enumerate(frame[1:]):
+ # Remove left padding
+ statement = statement.lstrip()
+
+ # Remove referer if needed
+ if not args.no_strip_referer:
+ statement = statement.split(", referer")[0]
+
+ # Build a context string if needed
+ context = f" # {file}#{line_num}" if idx == 0 else ""
+
+ # Print the line
+ output_lines.append((statement, context))
+
+ # Figure out the longest statement
+ longest_statement = max(len(line[0]) for line in output_lines[:-1])
+
+ # Build the lines, padding the statements so that the files line up
+ output = ""
+ for statement, context in output_lines:
+ output += f"{statement.ljust(longest_statement)}{context}\n"
+
+ # remove any trailing newlines
+ output = output.rstrip()
+
+ # Figure out the longest line
+ output_trace = "\n".join(output.splitlines()[:-1])
+ output_error = output.splitlines()[-1]
+ if args.trace_only:
+ longest_line = max(len(line) for line in output_trace.splitlines())
+ else:
+ longest_line = max(len(line) for line in output.splitlines())
+
+ # Pass over to rich to do the syntax highlighting
+ console = Console(record=True, width=longest_line + 1)
+ console.print(Syntax(output_trace, "python", background_color="default"))
+ if not args.trace_only:
+ console.print(
+ Syntax(output_error, "python", background_color="default")
+ )
+
+ # Export an image
+ file_name = f"Traceback {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
+ if args.file:
+ file_name += f" ({args.file.stem})"
+ file_name += ".svg"
+ OUTPUT_ROOT.mkdir(parents=True, exist_ok=True)
+ console.save_svg(
+ OUTPUT_ROOT / file_name, title="Evan's Python Traceback Visualizer"
+ )
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/qmk-helper b/scripts/qmk-helper
new file mode 100755
index 0000000..a3b6a47
--- /dev/null
+++ b/scripts/qmk-helper
@@ -0,0 +1,168 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import subprocess
+import shutil
+import os
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+QMK_REPOSITORY = "https://github.com/qmk/qmk_firmware"
+QMK_PINNED_COMMIT = "daabe2d8c5eab9d9d605f8e079dfae82d2b06a8d"
+QMK_CLONE_PATH = Path("~/src/qmk_firmware").expanduser()
+QMK_USERNAME = "ewpratten"
+LOCAL_KEYMAPS_ROOT = Path(os.environ["EWCONFIG_ROOT"]) / "keyboards" / "qmk" / "keymaps"
+
+
+def check_prerequisite_tools() -> bool:
+ # Ensure we have git
+ if shutil.which("git") is None:
+ logger.error("git is not installed")
+ return False
+
+ # Ensure we have make
+ if shutil.which("make") is None:
+ logger.error("make is not installed")
+ return False
+
+ # Ensure we have qmk
+ if shutil.which("qmk") is None:
+ logger.error("qmk is not installed")
+ return False
+
+ # OK
+ return True
+
+
+def refresh_qmk_repo():
+ # If the repo doesn't exist, clone it
+ if not QMK_CLONE_PATH.exists():
+ logger.info("Cloning QMK repository")
+ QMK_CLONE_PATH.parent.mkdir(parents=True, exist_ok=True)
+ subprocess.run(
+ ["git", "clone", QMK_REPOSITORY, QMK_CLONE_PATH],
+ check=True,
+ )
+
+ # Drop any local changes
+ logger.info("Dropping local changes")
+ subprocess.run(
+ ["git", "reset", "--hard"],
+ check=True,
+ cwd=QMK_CLONE_PATH,
+ )
+
+ # Pull the latest changes and then checkout the pinned commit
+ logger.info("Updating QMK repository")
+ subprocess.run(
+ ["git", "fetch", "--all"],
+ check=True,
+ cwd=QMK_CLONE_PATH,
+ )
+ subprocess.run(
+ ["git", "checkout", QMK_PINNED_COMMIT],
+ check=True,
+ cwd=QMK_CLONE_PATH,
+ )
+
+ # Update submodules
+ logger.info("Updating QMK submodules")
+ subprocess.run(
+ ["git", "submodule", "update", "--init", "--recursive"],
+ check=True,
+ cwd=QMK_CLONE_PATH,
+ )
+
+
+def copy_keymap(keyboard: str):
+ # Build the path that this keymap should be copied to
+ KEYMAP_PATH = QMK_CLONE_PATH / "keyboards" / keyboard / "keymaps" / QMK_USERNAME
+
+ # If the keymap already exists, delete it
+ if KEYMAP_PATH.exists():
+ logger.info("Removing existing keymap")
+ shutil.rmtree(KEYMAP_PATH)
+
+ # Copy the keymap
+ logger.info(f"Copying keymap to: {KEYMAP_PATH}")
+ shutil.copytree(LOCAL_KEYMAPS_ROOT / keyboard, KEYMAP_PATH)
+
+
+def build_keymap(keyboard: str):
+ # Build the keymap
+ logger.info(f"Building keymap: {keyboard}")
+ subprocess.run(
+ ["make", keyboard + ":" + QMK_USERNAME],
+ check=True,
+ cwd=QMK_CLONE_PATH,
+ )
+
+
+def flash_keymap(keyboard: str, flash_mode: str):
+ # Flash the keymap
+ logger.info(f"Flashing keymap: {keyboard} ({flash_mode})")
+ subprocess.run(
+ ["qmk", "flash", "-kb", keyboard, "-km", QMK_USERNAME, "-bl", flash_mode],
+ check=True,
+ cwd=QMK_CLONE_PATH,
+ )
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="qmk-helper", description="Utility for flashing QMK boards"
+ )
+ ap.add_argument("mode", choices=["build", "flash"], help="Mode to run in")
+ ap.add_argument(
+ "keyboard",
+ help="Keyboard to build/flash",
+ choices=["tg4x", "ferris/sweep"],
+ )
+ ap.add_argument("--flash-mode", "-f", help="Flash mode to use", default="flash")
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Check for prerequisite tools
+ if not check_prerequisite_tools():
+ return 1
+ logger.info("Prerequisite tools found")
+
+ # Refresh the QMK repository
+ refresh_qmk_repo()
+
+ # Figure out the keymap name
+ keymap = args.keyboard.split("/")[0]
+
+ # Copy the keymap
+ copy_keymap(keymap)
+
+ # Handle the modes
+ if args.mode == "build":
+ build_keymap(args.keyboard)
+ elif args.mode == "flash":
+ # Make sure that the flash mode is valid
+ if args.keyboard == "ferris/sweep" and args.flash_mode not in ["dfu-split-left", "dfu-split-right"]:
+ logger.error(
+ "Invalid flash mode. Must be one of: dfu-split-left, dfu-split-right"
+ )
+ return 1
+
+ # Flash
+ flash_keymap(args.keyboard, args.flash_mode)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/rbn-grep b/scripts/rbn-grep
new file mode 100755
index 0000000..e349db9
--- /dev/null
+++ b/scripts/rbn-grep
@@ -0,0 +1,88 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import re
+import socket
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="rbn-grep", description="Grep against RBN data in real-time"
+ )
+ ap.add_argument(
+ "--callsign", "-c", help="REGEX for the TX-ing callsign", default=".*"
+ )
+ ap.add_argument(
+ "--spotter", "-s", help="REGEX for the spotting callsign", default=".*"
+ )
+ ap.add_argument(
+ "--min-frequency", "--fl", help="Minimum frequency in kc", type=float, default=0
+ )
+ ap.add_argument(
+ "--max-frequency",
+ "--fh",
+ help="Maximum frequency in kc",
+ type=float,
+ default=sys.maxsize,
+ )
+ ap.add_argument(
+ "--stream-type",
+ "--st",
+ help="Stream type",
+ default="analog",
+ choices=["analog", "digital"],
+ )
+ ap.add_argument("--login-callsign", help="Login callsign", default="n0call")
+ args = ap.parse_args()
+
+ # Compile regexes
+ callsign_regex = re.compile(args.callsign, re.IGNORECASE)
+ spotter_regex = re.compile(args.spotter, re.IGNORECASE)
+
+ # Connect to the RBN Telnet servers
+ conn = socket.create_connection(
+ ("telnet.reversebeacon.net", 7000 if args.stream_type == "analog" else 7001)
+ )
+
+ # Log in
+ conn.send(f"{args.login_callsign}\n".encode("ascii"))
+
+ # Read lines and filter
+ try:
+ while True:
+ # Read a line
+ lines = conn.recv(1024).decode("ascii").strip()
+ for line in lines.splitlines():
+ line_split = [x for x in line.split(" ") if x]
+
+ # Ignore bad lines
+ if not line.startswith("DX"):
+ continue
+
+ # Parse the data
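+ # Spot lines typically look like: DX de SPOTTER-#:  14025.0  CALLSIGN  CW  24 dB ...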
+ spotter = line_split[2]
+ frequency_kc = float(line_split[3])
+ spotted = line_split[4]
+
+ # Filter
+ if (
+ not callsign_regex.match(spotted)
+ or not spotter_regex.match(spotter)
+ or frequency_kc < args.min_frequency
+ or frequency_kc > args.max_frequency
+ ):
+ continue
+
+ # Print the line
+ print(line)
+
+ except KeyboardInterrupt:
+ print("\nGoodbye")
+ conn.close()
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/run-logid b/scripts/run-logid
new file mode 100755
index 0000000..9c2720b
--- /dev/null
+++ b/scripts/run-logid
@@ -0,0 +1,4 @@
+#! /bin/bash
+set -ex
+
+logid -v -c ~/.config/logid/logid.cfg
diff --git a/scripts/scp-make-upload-acl b/scripts/scp-make-upload-acl
new file mode 100755
index 0000000..faf4f1e
--- /dev/null
+++ b/scripts/scp-make-upload-acl
@@ -0,0 +1,17 @@
+#! /bin/sh
+set -e
+
+# Require a target directory, a key type, and a public key
+if [ $# -ne 3 ]; then
+ echo "Usage: scp-make-upload-acl <directory> <key-type> <public-key>"
+ exit 1
+fi
+
+# Require the directory to exist
+if [ ! -d "$1" ]; then
+ echo "Directory $1 does not exist"
+ exit 1
+fi
+
+# If all is ok, then print out the authorized_keys line that restricts that key to that directory
+echo "command=\"scp -t $1\",no-agent-forwarding,no-port-forwarding,no-pty,no-user-rc,no-X11-forwarding $2 $3"
diff --git a/scripts/sh2img b/scripts/sh2img
new file mode 100755
index 0000000..126306a
--- /dev/null
+++ b/scripts/sh2img
@@ -0,0 +1,47 @@
+#! /usr/bin/env python
+import sys
+import os
+import subprocess
+import argparse
+from pathlib import Path
+from rich.console import Console
+from rich.syntax import Syntax
+from datetime import datetime
+
+def main() -> int:
+ # Read the arguments
+ ap = argparse.ArgumentParser(prog="sh2img", description="Generate images from shell commands")
+ ap.add_argument("command", help="The command to execute", nargs="+")
+ ap.add_argument("--shell", "-s", help="The shell to use")
+ args = ap.parse_args()
+
+ # Figure out if we are root
+ is_root = os.geteuid() == 0
+ shell_char = "#" if is_root else "$"
+
+ # Set up the console
+ console = Console(record=True)
+
+ # Print out the arguments as a command being executed
+ console.print(f"{shell_char} {' '.join(args.command)}", style="white", highlight=False)
+ if args.shell:
+ args.command = [args.shell, "-c", " ".join(args.command)]
+
+ # Execute the program, capturing all output together in one string
+ output = subprocess.run(args.command, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
+ output = output.stdout.decode("utf-8").strip()
+
+ console.width = min(max(len(line) for line in output.splitlines()), 150)
+
+ # Print the output
+ console.print(output, highlight=False)
+
+ # Save to a file
+ out_file = Path("~/Pictures/sh2img").expanduser() / f"{datetime.now().timestamp()}.svg"
+ out_file.parent.mkdir(parents=True, exist_ok=True)
+ console.save_svg(out_file, title=args.command[0])
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/show-pythonpath b/scripts/show-pythonpath
new file mode 100755
index 0000000..13271c5
--- /dev/null
+++ b/scripts/show-pythonpath
@@ -0,0 +1,5 @@
+#! /usr/bin/env python
+import sys
+
+for entry in sys.path:
+ print(" - " + entry)
diff --git a/scripts/tinker b/scripts/tinker
new file mode 100755
index 0000000..2cca4a0
--- /dev/null
+++ b/scripts/tinker
@@ -0,0 +1,11 @@
+#! /bin/bash
+# NOTE: This is a script so that we can launch it from a gnome keybind
+set -e
+
+# If PYTHONSTARTUP is not set, we have to set it
+if [ -z "$PYTHONSTARTUP" ]; then
+ export PYTHONSTARTUP="$HOME/.config/ewconfig/configs/python/python_startup.py"
+fi
+
+# Launch python
+PYTHON_TINKER_MODE=1 python3
\ No newline at end of file
diff --git a/scripts/ufw-del b/scripts/ufw-del
new file mode 100755
index 0000000..11a5488
--- /dev/null
+++ b/scripts/ufw-del
@@ -0,0 +1,50 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import subprocess
+
+logger = logging.getLogger(__name__)
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog='ufw-del', description='Delete UFW rules by their comment')
+ ap.add_argument('comment', help='Comment to delete')
+ ap.add_argument("--dry-run", help="Don't actually delete the rules", action="store_true")
+ ap.add_argument('-v', '--verbose', help='Enable verbose logging', action='store_true')
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format='%(levelname)s: %(message)s',
+ )
+
+ # Call ufw to get a list of rules
+ logger.info('Getting list of rules...')
+ rules = subprocess.run(['sudo', 'ufw', 'status', 'numbered'], capture_output=True, text=True).stdout.split('\n')
+ rules = [rule for rule in rules if rule.startswith('[')]
+ logger.info(f'Found {len(rules)} rules')
+
+ # Reshape the rules list to be (number, line) tuples
+ rules = [(int(rule.split('[')[1].split(']')[0].strip()), rule) for rule in rules]
+
+ # Sort descending by rule number
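+ # (deleting from highest to lowest keeps the remaining rule numbers valid as we go)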
+ rules.sort(key=lambda x: x[0], reverse=True)
+
+ # Delete rules with the specified comment
+ for rule_num, rule in rules:
+ if "#" in rule:
+ comment = rule.split('#')[1].strip()
+ if comment == args.comment:
+ rule_id = rule.split("]")[0].strip('[').strip()
+ logger.info(f'Deleting rule {rule_id}...')
+
+ if not args.dry_run:
+ subprocess.run(['sudo', 'ufw', 'delete', rule_id])
+
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/ufw-gen b/scripts/ufw-gen
new file mode 100755
index 0000000..851470a
--- /dev/null
+++ b/scripts/ufw-gen
@@ -0,0 +1,114 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import subprocess
+
+logger = logging.getLogger(__name__)
+
+PROFILES = {
+ "minecraft": {
+ "comment": "Minecraft Server",
+ "ports": [(25565, "tcp"), (25565, "udp")],
+ },
+ "unturned": {
+ "comment": "Unturned Server",
+ "ports": [(27015, "tcp"), (27015, "udp"), (27016, "tcp"), (27016, "udp")],
+ },
+ "zola": {
+ "comment": "Zola",
+ "ports": [(1111, "tcp")],
+ },
+ "kdeconnect": {
+ "comment": "KDE Connect",
+ "ports": [("1714:1764", "udp"), ("1714:1764", "tcp")],
+ },
+ "asterisk": {
+ "comment": "Asterisk",
+ "ports": [
+ (5060, "udp"),
+ (4569, "udp"),
+ (4569, "tcp"),
+ (5060, "tcp"),
+ (5061, "tcp"),
+ ("10000:20000", "udp"),
+ ],
+ },
+ "elproxy": {
+ "comment": "ElProxy",
+ "ports": [(8100, "tcp"), (5198, "udp"), (5199, "udp"), (5200, "tcp")],
+ },
+}
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="ufw-gen", description="Generate UFW allow commands"
+ )
+ ap.add_argument(
+ "profile",
+ help="Profile to generate UFW allow commands for",
+ choices=PROFILES.keys(),
+ )
+ ap.add_argument("--from", help="Source IP address", default="any", dest="source")
+ ap.add_argument("--to", help="Destination IP address", default="any", dest="dest")
+ ap.add_argument(
+ "--no-sudo", help="Don't prefix commands with sudo", action="store_true"
+ )
+ ap.add_argument(
+ "--dry-run", help="Generate UFW commands in dry-run mode", action="store_true"
+ )
+ ap.add_argument(
+ "--execute", "-x", help="Execute generated UFW commands", action="store_true"
+ )
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Generate UFW allow commands
+ profile = PROFILES[args.profile]
+ for port, proto in profile["ports"]:
+ # Generate the command
+ command = ["sudo", "ufw"] if not args.no_sudo else ["ufw"]
+ if args.dry_run:
+ command.append("--dry-run")
+ command.extend(
+ [
+ "allow",
+ "from",
+ args.source,
+ "to",
+ args.dest,
+ "port",
+ str(port),
+ "proto",
+ proto,
+ "comment",
+ profile["comment"],
+ ]
+ )
+
+ # Run
+ print(" ".join(command))
+ if args.execute:
+ result = subprocess.run(command).returncode
+ if result != 0:
+ logger.error("Failed to run command: %s", command)
+ return result
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/usdnc-to-usd b/scripts/usdnc-to-usd
new file mode 100755
index 0000000..9696096
--- /dev/null
+++ b/scripts/usdnc-to-usd
@@ -0,0 +1,47 @@
+#! /usr/bin/env -S hython-latest -I
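+# NOTE: runs under Houdini's hython so that the pxr (USD) Python bindings bundled with Houdini are available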
+import argparse
+import sys
+from pxr import Usd
+from pathlib import Path
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="usdnc-to-usd", description="Convert USDNC files to USD"
+ )
+ ap.add_argument("input", help="Input file", type=Path)
+ ap.add_argument(
+ "--output",
+ "-o",
+ help="Output file. Defaults to the input file with a new extension.",
+ type=Path,
+ default=None,
+ )
+ ap.add_argument(
+ "--format",
+ "-f",
+ help="Output format. Defaults to usda.",
+ type=str,
+ default="usda",
+ choices=["usda", "usdc"],
+ )
+ args = ap.parse_args()
+
+ # Read the input file
+ print(f"Opening stage from: {args.input}")
+ stage = Usd.Stage.Open(args.input.as_posix())
+
+ # Determine the output file
+ if not args.output:
+ args.output = args.input.with_suffix(f".{args.format}")
+
+ # Write the output file
+ print(f"Writing stage to: {args.output}")
+ stage.Export(args.output.as_posix())
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/video_trimmer b/scripts/video_trimmer
new file mode 100755
index 0000000..36e9186
--- /dev/null
+++ b/scripts/video_trimmer
@@ -0,0 +1,243 @@
+#! /usr/bin/env python3
+import os
+import argparse
+import sys
+import re
+import datetime
+import subprocess
+import tkinter as tk
+from pathlib import Path
+
+ALLOWED_INPUT_FORMATS = ["mp4"]
+TIMESTAMP_FORMAT_RE = re.compile(r"^\d{2}:\d{2}:\d{2}$")
+RENDER_MODES = ["Video + Audio", "Video Only", "Audio Only"]
+
+
+def open_in_video_player(file: Path):
+ os.system(f'xdg-open "{file}"')
+
+
+def render(
+ input_file: Path,
+ output_file: Path,
+ start_timestamp: str,
+ end_timestamp: str,
+ mode: str,
+):
+ # Construct the appropriate ffmpeg command
+ ffmpeg_command = ["ffmpeg"]
+
+ # Add the input file
+ ffmpeg_command += ["-i", str(input_file)]
+
+ # Add the start and end timestamps
+ ffmpeg_command += ["-ss", start_timestamp]
+ ffmpeg_command += ["-to", end_timestamp]
+
+ # Add the mode
+ if mode == "Video + Audio":
+ ffmpeg_command += ["-c", "copy"]
+ elif mode == "Video Only":
+ ffmpeg_command += ["-c", "copy"]
+ ffmpeg_command += ["-an"]
+ elif mode == "Audio Only":
+ ffmpeg_command += ["-vn"]
+
+ # Add the output file
+ ffmpeg_command += [str(output_file)]
+
+ # Run the command. Open in a new terminal window
+ subprocess.call(ffmpeg_command)
+
+
+def do_preview(input_file: Path, start_timestamp: str, end_timestamp: str, mode: str):
+ # Start by rendering to a tempfile with the same extension as the input file
+ temp_file = Path("/tmp") / f"{input_file.stem}_trimmed{input_file.suffix}"
+ temp_file.unlink(missing_ok=True)
+ render(input_file, temp_file, start_timestamp, end_timestamp, mode)
+
+ # Display the temp file in a video player
+ open_in_video_player(temp_file)
+
+
+def do_render(input_file: Path, start_timestamp: str, end_timestamp: str, mode: str):
+ # Create the new file beside the old one
+ start_time_str = start_timestamp.replace(":", ".")
+ end_time_str = end_timestamp.replace(":", ".")
+ file_suffix = ".mp3" if mode == "Audio Only" else input_file.suffix
+ output_file = (
+ input_file.parent
+ / f"{input_file.stem}_trimmed_{start_time_str}_{end_time_str}_render{file_suffix}"
+ )
+ output_file.unlink(missing_ok=True)
+
+ # Call the render function
+ render(input_file, output_file, start_timestamp, end_timestamp, mode)
+
+    # Copy the container metadata from the original file onto the render.
+    # ffmpeg can't edit a file in place, so write to a temp file and swap it in.
+    metadata_file = output_file.with_name(f"{output_file.stem}_meta{output_file.suffix}")
+    subprocess.call(["ffmpeg", "-i", str(output_file), "-i", str(input_file), "-map", "0", "-map_metadata", "1", "-c", "copy", "-y", str(metadata_file)])
+    metadata_file.replace(output_file)
+
+ # Set the file timestamp to the same as the original file
+ subprocess.call(["touch", "-r", str(input_file), str(output_file)])
+
+
+def build_gui(input_file: Path) -> tk.Tk:
+ # Build the GUI
+ root = tk.Tk()
+ root.title("Evan's Video Trimmer")
+ # root.geometry("280x500")
+
+ # Add a section title
+ title = tk.Label(root, text="Input File")
+ title.grid(row=0, column=0, columnspan=2)
+
+ # Add a button to open the original file
+ open_original_button = tk.Button(
+ root,
+ text="Open original file",
+ command=lambda: open_in_video_player(input_file),
+ )
+ open_original_button.grid(row=2, column=0, columnspan=2, pady=2)
+
+ # Add a horizontal separator
+ separator = tk.Frame(height=2, bd=1, relief=tk.SUNKEN)
+ separator.grid(row=3, column=0, columnspan=2, sticky=tk.W + tk.E, pady=2)
+
+ # Add a section title
+ title = tk.Label(root, text="Output Controls")
+ title.grid(row=4, column=0, columnspan=2, pady=2)
+
+ # Add an input field for start timestamp
+ start_timestamp = tk.StringVar()
+ start_timestamp.set("00:00:00")
+ start_timestamp_label = tk.Label(root, text="Start Timestamp")
+ start_timestamp_label.grid(row=5, column=0, sticky=tk.E)
+ start_timestamp_input = tk.Entry(root, textvariable=start_timestamp)
+ start_timestamp_input.grid(row=5, column=1)
+
+ # Add an input field for end timestamp
+ end_timestamp = tk.StringVar()
+ end_timestamp.set("00:00:00")
+ end_timestamp_label = tk.Label(root, text="End Timestamp")
+ end_timestamp_label.grid(row=6, column=0, sticky=tk.E)
+ end_timestamp_input = tk.Entry(root, textvariable=end_timestamp)
+ end_timestamp_input.grid(row=6, column=1)
+
+ # Add a "mode" dropdown
+ mode = tk.StringVar()
+ mode.set(RENDER_MODES[0])
+ mode_label = tk.Label(root, text="Trim Mode")
+ mode_label.grid(row=7, column=0, sticky=tk.E)
+ mode_input = tk.OptionMenu(root, mode, *RENDER_MODES)
+ mode_input.grid(row=7, column=1, sticky="we")
+
+ # Add a horizontal separator
+ separator = tk.Frame(height=2, bd=1, relief=tk.SUNKEN)
+ separator.grid(row=8, column=0, columnspan=2, sticky=tk.W + tk.E, pady=2)
+
+ # Function to pre-validate inputs
+ def validate_inputs():
+ # The start timestamp must be hh:mm:ss
+ if not TIMESTAMP_FORMAT_RE.match(start_timestamp.get()):
+ popup_error(
+ "Start timestamp must be in hh:mm:ss format", quit_on_close=False
+ )
+ return False
+ # The end timestamp must be hh:mm:ss
+ if not TIMESTAMP_FORMAT_RE.match(end_timestamp.get()):
+ popup_error("End timestamp must be in hh:mm:ss format", quit_on_close=False)
+ return False
+ # The end timestamp must be after the start timestamp
+ start_time = datetime.datetime.strptime(start_timestamp.get(), "%H:%M:%S")
+ end_time = datetime.datetime.strptime(end_timestamp.get(), "%H:%M:%S")
+ if end_time <= start_time:
+ popup_error(
+ "End timestamp must be after start timestamp", quit_on_close=False
+ )
+ return False
+ return True
+
+ # Add a button to preview the output
+ preview_button = tk.Button(
+ root,
+ text="Preview Output",
+ command=lambda: do_preview(
+ input_file, start_timestamp.get(), end_timestamp.get(), mode.get()
+ )
+ if validate_inputs()
+ else None,
+ )
+ preview_button.grid(row=9, column=0, pady=2, sticky="we")
+
+ # Add a button to render the output
+ render_button = tk.Button(
+ root,
+ text="Render",
+ command=lambda: do_render(
+ input_file, start_timestamp.get(), end_timestamp.get(), mode.get()
+ )
+ if validate_inputs()
+ else None,
+ )
+ render_button.grid(row=9, column=1, pady=2, sticky="we")
+
+ return root
+
+
+def popup_error(message: str, quit_on_close: bool = True):
+ # Make a popup window
+ popup = tk.Tk()
+ popup.wm_title("Error")
+
+ # Add a message
+ label = tk.Label(popup, text=message)
+ label.pack(side="top", fill="x", pady=10)
+
+ # Add a button to close the popup
+ button = tk.Button(popup, text="Okay", command=popup.destroy)
+ button.pack()
+
+ # Run the popup
+ popup.mainloop()
+
+ # Exit the program
+ if quit_on_close:
+ sys.exit(1)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser()
+ ap.add_argument(
+ "--file", help="File to open in Video Trimmer", type=str, required=False
+ )
+ args = ap.parse_args()
+
+ # Read the file
+ uncut_file = Path(args.file) if args.file else None
+ if not uncut_file:
+ # Try to read from env
+ if "NAUTILUS_SCRIPT_SELECTED_FILE_PATHS" not in os.environ:
+ popup_error("No file selected")
+ return 1
+ uncut_file = Path(
+ os.environ["NAUTILUS_SCRIPT_SELECTED_FILE_PATHS"].splitlines()[0]
+ )
+
+ # Require one of the acceptable file types
+ if uncut_file.suffix[1:].lower() not in ALLOWED_INPUT_FORMATS:
+ popup_error(
+ f"File type not supported: {uncut_file.suffix}\n"
+ f"Supported types: {ALLOWED_INPUT_FORMATS}"
+ )
+ return 1
+
+ # Build the GUI and run
+ root = build_gui(uncut_file)
+ root.mainloop()
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/wg-genzone b/scripts/wg-genzone
new file mode 100755
index 0000000..6891568
--- /dev/null
+++ b/scripts/wg-genzone
@@ -0,0 +1,144 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import subprocess
+import ipaddress
+import json
+from typing import Optional, List, Tuple, Union
+from dataclasses import dataclass
+
+
+@dataclass
+class PeerMetadata:
+ host: str
+ namespace: Optional[str] = None
+
+
+def get_interface_config(interface: str, sudo: bool = False) -> Optional[str]:
+ # Execute wg-quick to get the interface config
+ try:
+ cmd = ["wg-quick", "strip", interface]
+ if sudo:
+ cmd.insert(0, "sudo")
+ output = subprocess.check_output(cmd, text=True)
+ except subprocess.CalledProcessError as e:
+ print(f"Error executing wg-quick: {e}", file=sys.stderr)
+ return None
+
+ return output
+
+
+def get_addr_maps(
+ config: str,
+) -> List[
+ Tuple[PeerMetadata, List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]]
+]:
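+    # Peers are expected to carry their metadata as inline JSON in a comment on
+    # the [Peer] line, e.g.:  [Peer] # {"host": "laptop", "ns": "home"}
+    # "host" is required; "ns" (namespace) is optional.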
+ # Split into lines
+ lines = config.splitlines()
+
+ # Read until the first peer definition
+ while lines and not lines[0].startswith("[Peer]"):
+ lines.pop(0)
+
+ # Read the peer definitions
+ output = []
+ while len(lines) > 0:
+ # Read the peer definition
+ peer_line = lines.pop(0).split("#")
+
+ # Skip peers without metadata
+ if len(peer_line) == 1 or peer_line[1].strip() == "":
+ while len(lines) > 0 and not lines[0].startswith("[Peer]"):
+ lines.pop(0)
+ continue
+
+ # The metadata is JSON
+ metadata = json.loads(peer_line[1])
+ metadata = PeerMetadata(host=metadata["host"], namespace=metadata.get("ns"))
+
+ # Skim through everything until the next peer definition ( or EOF ) in search of allowed ips
+ allowed_ips = []
+ while len(lines) > 0 and not lines[0].startswith("[Peer]"):
+ # If this is an allowed ip line, parse it
+ if lines[0].startswith("AllowedIPs"):
+ allowed_ips_line = lines[0].split("#")[0]
+ allowed_ips.extend(
+ [
+ ipaddress.ip_network(addr.strip())
+ for addr in (allowed_ips_line.split("=")[1].strip()).split(",")
+ ]
+ )
+
+ # Pop the line
+ lines.pop(0)
+
+ # Find any ips that are a /32 (ipv4) or /128 (ipv6)
+ addresses = []
+ for allowed_ip in allowed_ips:
+ if (
+ isinstance(allowed_ip, ipaddress.IPv4Network)
+ and allowed_ip.prefixlen == 32
+ ):
+ addresses.append(allowed_ip.network_address)
+ elif (
+ isinstance(allowed_ip, ipaddress.IPv6Network)
+ and allowed_ip.prefixlen == 128
+ ):
+ addresses.append(allowed_ip.network_address)
+
+ # Build the output
+ output.append((metadata, addresses))
+
+ return output
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="wg-genzone",
+ description="Generates a DNS zone file for a WireGuard interface",
+ )
+ ap.add_argument("interface", help="The name of the WireGuard interface")
+ ap.add_argument("--zone", help="The name of the zone to generate", required=True)
+ ap.add_argument(
+ "--no-sudo", action="store_true", help="Do not use sudo to execute wg-quick"
+ )
+ ap.add_argument("--ttl", help="The TTL to use for the zone", default=60)
+ args = ap.parse_args()
+
+ # Read the interface config
+ config = get_interface_config(args.interface, sudo=not args.no_sudo)
+ if not config:
+ return 1
+
+ # Get a mapping of metadata to addresses
+ addr_maps = get_addr_maps(config)
+
+ # Convert to a zone file
+ print(f"$ORIGIN {args.zone}.")
+ print(f"$TTL {args.ttl}")
+ print(f"@ IN SOA ns.{args.zone}. noc.ewpratten.com. 1 3600 600 86400 60")
+
+ # Add the hosts
+ for metadata, addresses in addr_maps:
+ # Build the host's address
+ host = metadata.host
+ if metadata.namespace:
+ host = f"{host}.{metadata.namespace}"
+ host = f"{host}.{args.zone}"
+
+ # Add forward and reverse records
+ for address in addresses:
+ if isinstance(address, ipaddress.IPv4Address):
+ print(f"{host}. IN A {address}")
+ print(f"{address.reverse_pointer}. IN PTR {host}.")
+ elif isinstance(address, ipaddress.IPv6Address):
+ print(f"{host}. IN AAAA {address}")
+ print(f"{address.reverse_pointer}. IN PTR {host}.")
+
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/wg-get-client-ip b/scripts/wg-get-client-ip
new file mode 100755
index 0000000..c2f1450
--- /dev/null
+++ b/scripts/wg-get-client-ip
@@ -0,0 +1,11 @@
+#! /bin/bash
+set -e
+
+# We need 1 argument
+if [ $# -ne 1 ]; then
+ echo "Usage: wg-get-client-ip "
+ exit 1
+fi
+
+# NOTE: The final `grep` call is needed to make this fail if the pubkey is not found
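+# Field 4 of `wg show all dump` is the peer endpoint ("host:port"); the port is stripped to leave the client IP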
+sudo wg show all dump | grep "$1" | cut -f 4 | cut -d: -f1 | grep ""
diff --git a/scripts/wg-handshakes b/scripts/wg-handshakes
new file mode 100755
index 0000000..74528dc
--- /dev/null
+++ b/scripts/wg-handshakes
@@ -0,0 +1,138 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import subprocess
+from datetime import datetime
+from typing import List, Optional
+
+try:
+ import timeago
+except ImportError:
+ print(
+ "Required dependency missing. Install by running: pip3 install timeago",
+ file=sys.stderr,
+ )
+ sys.exit(1)
+
+
+def get_name_for_client(
+ pubkey: str, endpoint: str, allowed_ips: List[str], dns_server: Optional[str] = None
+) -> str:
+ # Build the dig command prefix
+ dig_cmd_pfx = ["dig"]
+ if dns_server:
+ dig_cmd_pfx.append(f"@{dns_server}")
+
+ # Search through the allowed ips for addresses with reverse dns
+ for ip in allowed_ips:
+ ip = ip.split("/")[0]
+ try:
+ name = subprocess.check_output(dig_cmd_pfx + ["-x", ip, "+short"]).decode(
+ "utf-8"
+ )
+ name = name.strip()
+ if name != "":
+ return name[:-1]
+ except subprocess.CalledProcessError:
+ pass
+
+ # Check the endpoint for reverse dns
+ try:
+ name = subprocess.check_output(dig_cmd_pfx + ["-x", endpoint, "+short"]).decode(
+ "utf-8"
+ )
+ name = name.strip()
+ if name != "":
+ return name[:-1]
+ except subprocess.CalledProcessError:
+ pass
+
+ # If all else fails, return the first 8 characters of the public key followed by ...
+ return pubkey[:8] + "..."
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(
+ prog="wg-handshakes", description="List the recency of WireGuard handshakes"
+ )
+ ap.add_argument(
+ "--interface", "-i", help="The WireGuard interface to use", default="all"
+ )
+ ap.add_argument(
+ "--no-sudo", help="Do not use sudo when running commands", action="store_true"
+ )
+ ap.add_argument(
+ "--dns-server", "-d", help="Override the DNS server to use for RDNS lookups"
+ )
+ ap.add_argument(
+ "--format",
+ default="text",
+ choices=["text", "html"],
+ help="The format to output",
+ )
+ args = ap.parse_args()
+
+ # Get the output of wg show
+ cmd = ["wg", "show", args.interface, "dump"]
+ if not args.no_sudo:
+ cmd.insert(0, "sudo")
+ output = subprocess.check_output(cmd).decode("utf-8")
+
+ # For every line (client) except the first (this device)
+ lines = output.split("\n")[1:]
+ outputs = []
+ for line in lines:
+ # values are in TSV
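+        # Peer columns in `wg show dump`: public-key, preshared-key, endpoint, allowed-ips, latest-handshake, transfer-rx, transfer-tx, persistent-keepalive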
+ values = line.split("\t")
+
+ # If the interface is `all`, ignore the first value
+ if args.interface == "all":
+ values = values[1:]
+
+        # Skip blank or non-peer lines (the dump output ends with a trailing newline)
+        if len(values) < 5:
+            continue
+
+ # Get the client's public key
+ pubkey = values[0]
+
+ # Read the IPs of the client to guess its name
+ allowed_ips = values[3].split(",")
+ endpoint = values[2].split(":")[0]
+
+ # Get the name of the client
+ name = get_name_for_client(pubkey, endpoint, allowed_ips, args.dns_server)
+
+ # Get the time of the last handshake
+ last_handshake = datetime.fromtimestamp(int(values[4]))
+ time_ago = (
+ timeago.format(last_handshake, datetime.now())
+ if values[4] != "0"
+ else "Never"
+ )
+
+ outputs.append((name, last_handshake, time_ago))
+
+ # Sort the outputs by time
+ outputs.sort(key=lambda x: x[1], reverse=True)
+
+ # Print the outputs
+ if args.format == "text":
+ for output in outputs:
+ print(f"{output[0]}: {output[2]}")
+ elif args.format == "html":
+ print("WireGuard Handshakes")
+ print("")
+ print("")
+ print("Name | Last Handshake |
")
+ for output in outputs:
+ print(f"{output[0]} | {output[2]} |
")
+ print("
")
+ print("")
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/scripts/wol b/scripts/wol
new file mode 100755
index 0000000..5d32cf0
--- /dev/null
+++ b/scripts/wol
@@ -0,0 +1,57 @@
+#! /usr/bin/env python3
+import argparse
+import sys
+import logging
+import socket
+import struct
+
+logger = logging.getLogger(__name__)
+
+
+def main() -> int:
+ # Handle program arguments
+ ap = argparse.ArgumentParser(prog="wol", description="Wake On LAN")
+ ap.add_argument("mac", help="MAC address of the target machine")
+ ap.add_argument(
+ "-p", "--port", help="UDP port to send the magic packet to", type=int, default=9
+ )
+ ap.add_argument(
+ "-v", "--verbose", help="Enable verbose logging", action="store_true"
+ )
+ args = ap.parse_args()
+
+ # Configure logging
+ logging.basicConfig(
+ level=logging.DEBUG if args.verbose else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Parse MAC address
+ mac = args.mac.replace(":", "").replace("-", "").replace(".", "")
+ if len(mac) != 12:
+ logger.error("Invalid MAC address")
+ return 1
+ mac = bytes.fromhex(mac)
+
+    # Create the WOL magic packet: 6 bytes of 0xFF followed by the target MAC repeated 16 times
+ magic_packet = b"\xff" * 6 + mac * 16
+
+ # Try to broadcast magic packet over both IPv4 and IPv6
+ try:
+        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
+            # "<broadcast>" resolves to 255.255.255.255; an empty host string would send to 0.0.0.0 instead of broadcasting
+            s.sendto(magic_packet, ("<broadcast>", args.port))
+ except OSError as e:
+ logger.error("Failed to broadcast magic packet over IPv4: %s", e)
+ try:
+ with socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) as s:
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
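+            # NOTE: ff02::1 is the all-nodes link-local multicast address; some systems require a zone index (e.g. "ff02::1%eth0") for this send to succeed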
+ s.sendto(magic_packet, ("ff02::1", args.port))
+ except OSError as e:
+ logger.error("Failed to broadcast magic packet over IPv6: %s", e)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/third_party/ufw-application-profiles b/third_party/ufw-application-profiles
new file mode 160000
index 0000000..63cdef1
--- /dev/null
+++ b/third_party/ufw-application-profiles
@@ -0,0 +1 @@
+Subproject commit 63cdef13e1fcaa41da500763dc2ca95da0f31486