diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml
new file mode 100644
index 0000000..e25f606
--- /dev/null
+++ b/.github/workflows/lint.yaml
@@ -0,0 +1,25 @@
+name: Python Linting
+
+on: [push, pull_request]
+
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.11'
+
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install flake8 black
+
+ - name: Check with Flake8
+ run: flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+
+ - name: Check formatting with Black
+ run: black --check .
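
The workflow above runs on every push and pull request. The Flake8 step is deliberately narrow: --select=E9,F63,F7,F82 limits it to syntax errors, invalid comparisons, and undefined names, and the Black step only verifies formatting, since --check rewrites nothing. To reproduce the CI result locally before pushing, the workflow's own commands can be run from the repository root (assuming a Python 3.11 environment to match the job):

    pip install flake8 black
    flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
    black --check .
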
diff --git a/.gitignore b/.gitignore
index 3d2a200..e5b9b43 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
+exchanges/data_folder
+exchanges/constants/.env
/venv
__pycache__/
.env
+exchanges/*.json
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..6a1480f
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) 2023 Adam
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ Siwa-lite Copyright (C) 2023 Adam
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/all_feeds.py b/all_feeds.py
index 4edeccb..4aedc35 100644
--- a/all_feeds.py
+++ b/all_feeds.py
@@ -4,11 +4,11 @@
Test = test_feed.Test
MCAP1000 = mcap1000.MCAP1000
-#NOTE: this is a dict of all feed classes that SIWA can run, keyed by feed name
+# NOTE: this is a dict of all feed classes that SIWA can run, keyed by feed name
# this is used in endpoint.py to route requests to the correct feed
#
-#TO ENABLE OR DISABLE A FEED, ADD OR REMOVE IT FROM THIS DICT
+# TO ENABLE OR DISABLE A FEED, ADD OR REMOVE IT FROM THIS DICT
all_feeds = {
Test.NAME: Test,
MCAP1000.NAME: MCAP1000,
- }
+}
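
Beyond the formatting pass, all_feeds.py is the feed registry: all_feeds maps each feed's NAME to its class, and endpoint.py resolves incoming requests against it. A minimal sketch of that lookup, with a hypothetical function name (the real routing lives in endpoint.py):

    # Hypothetical lookup against the registry; actual routing is in endpoint.py.
    from all_feeds import all_feeds

    def resolve_feed(feed_name):
        feed_cls = all_feeds.get(feed_name)
        if feed_cls is None:
            raise KeyError(f"no feed registered under {feed_name!r}")
        return feed_cls
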
diff --git a/apis/coingecko.py b/apis/coingecko.py
index 7b24890..1b03af3 100644
--- a/apis/coingecko.py
+++ b/apis/coingecko.py
@@ -17,6 +17,7 @@ class CoinGeckoAPI(CryptoAPI):
extract_market_cap(data: Dict[str, Any]) -> Dict[float, Dict[str, str]]:
Extracts market cap data from API response.
"""
+
VS_CURRENCY = "usd"
ORDER = "market_cap_desc"
PAGE = 1
@@ -31,8 +32,7 @@ def __init__(self) -> None:
Constructs all the necessary attributes for the CoinGeckoAPI object.
"""
super().__init__(
- url="https://api.coingecko.com/api/v3/coins/markets",
- source='coingecko'
+ url="https://api.coingecko.com/api/v3/coins/markets", source="coingecko"
)
@utils.handle_request_errors
@@ -92,7 +92,7 @@ def get_market_caps_of_list(self, tokens: List[str]) -> Dict[str, float]:
"""
market_caps = {}
- tokens_comma_sep = ','.join(tokens)
+ tokens_comma_sep = ",".join(tokens)
parameters = {
"vs_currency": self.VS_CURRENCY,
@@ -107,7 +107,7 @@ def get_market_caps_of_list(self, tokens: List[str]) -> Dict[str, float]:
name = d.get(self.NAME_KEY)
last_updated = d.get(self.LAST_UPDATED_KEY)
market_caps[market_cap] = {
- 'name': name,
- 'last_updated': last_updated,
+ "name": name,
+ "last_updated": last_updated,
}
return market_caps
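
These changes are quote style and line wrapping only; get_market_caps_of_list still returns a dict keyed by market cap (a float), each value carrying the token's name and last_updated timestamp. A usage sketch under that reading (token ids follow CoinGecko naming, e.g. "bitcoin"; network access assumed):

    # Illustrative only; not part of the repository.
    from apis.coingecko import CoinGeckoAPI

    api = CoinGeckoAPI()
    caps = api.get_market_caps_of_list(["bitcoin", "ethereum"])
    for market_cap, meta in sorted(caps.items(), reverse=True):
        print(meta["name"], market_cap, meta["last_updated"])

One caveat of this shape: two tokens with exactly equal market caps would collide on the same key, a trade-off the code accepts.
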
diff --git a/apis/coinmarketcap.py b/apis/coinmarketcap.py
index 48b9397..c527c59 100644
--- a/apis/coinmarketcap.py
+++ b/apis/coinmarketcap.py
@@ -33,10 +33,10 @@ def __init__(self) -> None:
"""
Constructs all the necessary attributes for the CoinMarketCapAPI object.
"""
- source = 'coinmarketcap'
+ source = "coinmarketcap"
super().__init__(
url="https://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest",
- source=source
+ source=source,
)
self.headers = {
self.CMC_PRO_API_KEY: self.get_api_key(source),
@@ -53,12 +53,8 @@ def get_data(self, N: int) -> Dict[str, Any]:
Returns:
Dict[str, Any]: A dictionary with data fetched from API.
"""
- parameters = {
- self.LIMIT: N
- }
- response = requests.get(
- self.url, headers=self.headers, params=parameters
- )
+ parameters = {self.LIMIT: N}
+ response = requests.get(self.url, headers=self.headers, params=parameters)
data = response.json()
return data
@@ -96,12 +92,8 @@ def get_market_cap_of_token(self, id: int) -> Dict[str, float]:
Dict[str, float]: A dictionary with market cap as keys and other metadata as values.
"""
url = "https://pro-api.coinmarketcap.com/v1/cryptocurrency/quotes/latest"
- parameters = {
- self.ID: id
- }
- response = requests.get(
- url, headers=self.headers, params=parameters
- )
+ parameters = {self.ID: id}
+ response = requests.get(url, headers=self.headers, params=parameters)
data = response.json()
market_cap_data = {}
@@ -111,9 +103,9 @@ def get_market_cap_of_token(self, id: int) -> Dict[str, float]:
last_updated = token_data[self.LAST_UPDATED]
market_cap = token_data[self.QUOTE][self.USD][self.MARKET_CAP]
market_cap_data = {
- 'name': name,
- 'market_cap': market_cap,
- 'last_updated': last_updated,
+ "name": name,
+ "market_cap": market_cap,
+ "last_updated": last_updated,
}
return market_cap_data
@@ -131,9 +123,9 @@ def get_market_caps_of_list(self, ids: List[int]) -> Dict[str, Dict[str, Any]]:
for id in ids:
mcap_data = self.get_market_cap_of_token(id)
if mcap_data:
- market_caps[mcap_data['market_cap']] = {
- 'name': mcap_data['name'],
- 'last_updated': mcap_data['last_updated']
+ market_caps[mcap_data["market_cap"]] = {
+ "name": mcap_data["name"],
+ "last_updated": mcap_data["last_updated"],
}
time.sleep(0.2) # To prevent hitting API rate limits
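
The sleep(0.2) above is the only rate limiting here: get_market_caps_of_list issues one quotes/latest request per id, so a list of N ids takes at least 0.2 * N seconds plus network time. A usage sketch (ids are CoinMarketCap's numeric ids; 1 and 1027 are used purely as examples, and a key is expected in api_keys.json):

    # Illustrative only; one HTTP request is made per id.
    from apis.coinmarketcap import CoinMarketCapAPI

    api = CoinMarketCapAPI()
    caps = api.get_market_caps_of_list([1, 1027])
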
diff --git a/apis/coinpaprika.py b/apis/coinpaprika.py
index 2bfec92..173ad21 100644
--- a/apis/coinpaprika.py
+++ b/apis/coinpaprika.py
@@ -25,10 +25,11 @@ def __init__(self) -> None:
"""
Constructs all the necessary attributes for the CoinPaprikaAPI object.
"""
- self.ohlc_url = "https://api.coinpaprika.com/v1/coins/{coin_id}/ohlcv/latest" # noqa E501
+ self.ohlc_url = (
+ "https://api.coinpaprika.com/v1/coins/{coin_id}/ohlcv/latest" # noqa E501
+ )
super().__init__(
- url="https://api.coinpaprika.com/v1/coins",
- source='coinpaprika'
+ url="https://api.coinpaprika.com/v1/coins", source="coinpaprika"
)
@utils.handle_request_errors
@@ -49,17 +50,18 @@ def get_data(self, N: int) -> List[Dict[str, Any]]:
data = response.json()
else:
raise requests.exceptions.RequestException(
- f"Received status code {response.status_code} "
- f"for URL: {self.url}"
+ f"Received status code {response.status_code} " f"for URL: {self.url}"
)
# Sorting the coins by market cap
# Also filtering out coins with rank 0 (junk values in API response)
- filtered_data = [coin for coin in data if coin['rank'] != 0]
- sorted_data = sorted(filtered_data, key=lambda coin: coin['rank'])[:N]
+ filtered_data = [coin for coin in data if coin["rank"] != 0]
+ sorted_data = sorted(filtered_data, key=lambda coin: coin["rank"])[:N]
return sorted_data
@utils.handle_request_errors
- def extract_market_cap(self, data: List[Dict[str, Any]]) -> Dict[float, Dict[str, Any]]:
+ def extract_market_cap(
+ self, data: List[Dict[str, Any]]
+ ) -> Dict[float, Dict[str, Any]]:
"""
Extracts market cap data from API response.
@@ -87,7 +89,7 @@ def extract_market_cap(self, data: List[Dict[str, Any]]) -> Dict[float, Dict[str
name = coin["name"]
last_updated = 0 # Updated every 5 mins as per docs: https://api.coinpaprika.com/#tag/Coins/paths/~1coins~1%7Bcoin_id%7D~1ohlcv~1today~1/get # noqa E501
- market_cap = coin_info[0]['market_cap']
+ market_cap = coin_info[0]["market_cap"]
market_data[market_cap] = {
"name": name,
"last_updated": last_updated,
diff --git a/apis/crypto_api.py b/apis/crypto_api.py
index 6f9cafe..5e6aa76 100644
--- a/apis/crypto_api.py
+++ b/apis/crypto_api.py
@@ -20,7 +20,7 @@ class CryptoAPI:
Abstract method to extract market cap data.
"""
- API_KEYS_FILE = 'api_keys.json'
+ API_KEYS_FILE = "api_keys.json"
def __init__(self, url: str, source: str) -> None:
"""
@@ -52,9 +52,7 @@ def fetch_mcap_by_list(self, tokens: List[str]) -> Dict[str, float]:
# Store market data in the database
utils.create_market_cap_database()
- utils.store_market_cap_data(
- market_data=market_data, source=self.source
- )
+ utils.store_market_cap_data(market_data=market_data, source=self.source)
return market_data
def fetch_mcap_by_rank(self, N: int) -> dict:
@@ -77,9 +75,7 @@ def fetch_mcap_by_rank(self, N: int) -> dict:
# Store market data in the database
utils.create_market_cap_database()
- utils.store_market_cap_data(
- market_data=market_data, source=self.source
- )
+ utils.store_market_cap_data(market_data=market_data, source=self.source)
return market_data
def get_data(self, N: int) -> Any:
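
CryptoAPI is the shared base for the provider wrappers: fetch_mcap_by_list and fetch_mcap_by_rank centralize persistence (utils.create_market_cap_database and utils.store_market_cap_data), while each subclass supplies the provider-specific retrieval and parsing. A sketch of that contract; ExampleAPI and its URL are hypothetical:

    # Hypothetical provider; shows the two hooks a subclass fills in.
    from typing import Any, Dict

    from apis.crypto_api import CryptoAPI

    class ExampleAPI(CryptoAPI):
        def __init__(self) -> None:
            super().__init__(url="https://api.example.com/markets", source="example")

        def get_data(self, N: int) -> Any:
            ...  # fetch the provider's raw top-N listing

        def extract_market_cap(self, data: Any) -> Dict[float, Dict[str, Any]]:
            ...  # return {market_cap: {"name": ..., "last_updated": ...}}
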
diff --git a/apis/cryptocompare.py b/apis/cryptocompare.py
index 5d73c53..90acdef 100644
--- a/apis/cryptocompare.py
+++ b/apis/cryptocompare.py
@@ -37,7 +37,7 @@ def __init__(self) -> None:
"""
super().__init__(
url="https://min-api.cryptocompare.com/data/top/mktcapfull",
- source='cryptocompare'
+ source="cryptocompare",
)
@utils.handle_request_errors
@@ -66,8 +66,7 @@ def get_data(self, N: int, buffer: int = 2) -> Dict[str, Any]:
data = response.json()
else:
raise requests.exceptions.RequestException(
- f"Received status code {response.status_code} "
- f"for URL: {self.url}"
+ f"Received status code {response.status_code} " f"for URL: {self.url}"
)
missing_count = 0
for coin in data[self.DATA]:
@@ -118,7 +117,7 @@ def get_market_caps_of_list(self, tokens: List[str]) -> Dict[str, Dict[str, Any]
"""
url = "https://min-api.cryptocompare.com/data/pricemultifull"
tokens_upper = [token.upper() for token in tokens]
- tokens_comma_sep = ','.join(tokens_upper)
+ tokens_comma_sep = ",".join(tokens_upper)
parameters = {
self.FSYMS: tokens_comma_sep,
self.TSYMS: self.USD,
@@ -133,7 +132,7 @@ def get_market_caps_of_list(self, tokens: List[str]) -> Dict[str, Dict[str, Any]
market_cap = data[self.RAW][token][self.USD][self.MKTCAP]
last_updated = data[self.RAW][token][self.USD][self.LAST_UPDATE]
market_caps[market_cap] = {
- 'name': token,
- 'last_updated': last_updated,
+ "name": token,
+ "last_updated": last_updated,
}
return market_caps
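
The upper-casing here is load-bearing: the code reads data[RAW][token] with the upper-cased symbol, so symbols are normalized before the comma-separated fsyms parameter is built. The idiom in isolation:

    # CryptoCompare is queried with upper-cased symbols; runnable as-is.
    tokens = ["btc", "eth"]
    tokens_comma_sep = ",".join(token.upper() for token in tokens)
    assert tokens_comma_sep == "BTC,ETH"
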
diff --git a/apis/unisat.py b/apis/unisat.py
index 5897dc6..d38fd1f 100644
--- a/apis/unisat.py
+++ b/apis/unisat.py
@@ -7,22 +7,164 @@
# import json
# from pathlib import Path
-# Adapted from:
+# Adapted from:
# https://docs.unisat.io/dev/unisat-developer-service/
# https://open-api.unisat.io/swagger.html
+# Headers
+headers = {
+ "accept": "application/json",
+ "Authorization": "Bearer 593b09946ab4c0749af07064803c7868c179e86162bf94c4b23a2b157f67c967",
+}
+
+URL = "https://open-api.unisat.io/v1/indexer/"
+
+
+def get_blockchain_info():
+ url = URL + "blockchain/info"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_block_txs(height):
+ url = URL + f"block/{height}/txs"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_tx_info(txid):
+ url = URL + f"tx/{txid}"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_inscription_utxo(address):
+ url = URL + f"address/{address}/inscription-utxo-data"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_inscription_info(inscriptionid):
+ url = URL + f"inscription/info/{inscriptionid}"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_brc20_list(start=0, limit=100):
+ url = URL + "brc20/list" + f"?start={start}&limit={limit}"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_brc20_status(start=0, limit=10, sort="holders", complete="yes"):
+ """
+ sort by (holders/deploy/minted/transactions)
+ filter by (yes/no)
+ """
+ url = (
+ URL
+ + "brc20/status"
+ + f"?start={start}&limit={limit}&sort={sort}&complete={complete}"
+ )
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_brc20_ticker_info(ticker):
+ url = URL + f"brc20/{ticker}/info"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_brc20_holders(ticker):
+ url = URL + f"brc20/{ticker}/holders"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_brc20_ticker_history(ticker, txid, type, start=0, limit=100):
+ """
+ type: inscribe-deploy, inscribe-mint, inscribe-transfer, transfer, send, receive
+ """
+ url = (
+ URL + f"brc20/{ticker}/tx/{txid}" + f"?type={type}&start={start}&limit={limit}"
+ )
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_history_by_height(height, start=0, limit=100):
+ url = URL + f"brc20/history-by-height/{height}" + f"?start={start}&limit={limit}"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_brc20_tx_history(ticker, txid, start=0, limit=100):
+ url = URL + f"brc20/{ticker}/tx/{txid}/history" + f"?start={start}&limit={limit}"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_address_brc20_summary(address, start=0, limit=100):
+ url = URL + f"address/{address}/brc20/summary" + f"?start={start}&limit={limit}"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_address_brc20_summary_by_height(address, height, start=0, limit=100):
+ url = (
+ URL
+ + f"address/{address}/brc20/summary-by-height/{height}"
+ + f"?start={start}&limit={limit}"
+ )
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_address_brc20_ticker_info(address, ticker):
+ url = URL + f"address/{address}/brc20/{ticker}/info"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_address_brc20_history(address, start=0, limit=100):
+ url = URL + f"address/{address}/brc20/history" + f"?start={start}&limit={limit}"
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_address_brc20_history_by_ticker(address, ticker, type, start=0, limit=100):
+ """
+ type: inscribe-deploy, inscribe-mint, inscribe-transfer, transfer, send, receive
+ """
+ url = (
+ URL
+ + f"address/{address}/brc20/{ticker}/history"
+ + f"?type={type}&start={start}&limit={limit}"
+ )
+ response = requests.get(url, headers=headers)
+ return response
+
+
+def get_transferable_inscriptions(address, ticker):
+ url = URL + f"address/{address}/brc20/{ticker}/transferable-inscriptions"
+ response = requests.get(url, headers=headers)
+ return response
+
+
# Load environment variables from .env file
load_dotenv()
+
+
class UnisatAPI:
-
def __init__(self):
- api_key = os.environ.get('UNISAT_API_KEY')
+ api_key = os.environ.get("UNISAT_API_KEY")
if api_key is None:
- raise ValueError('UNISAT_API_KEY environment variable is not set')
- self.base_url = 'https://open-api.unisat.io/v1/indexer/'
+ raise ValueError("UNISAT_API_KEY environment variable is not set")
+ self.base_url = "https://open-api.unisat.io/v1/indexer/"
self.headers = {
- 'accept': 'application/json',
- 'Authorization': f'Bearer {api_key}'
+ "accept": "application/json",
+ "Authorization": f"Bearer {api_key}",
}
self.api_key = api_key
@@ -30,85 +172,112 @@ def _make_request(self, endpoint, params=None):
url = self.base_url + endpoint
response = requests.get(url, headers=self.headers, params=params)
return response
-
+
def get_best_block_height(self):
- return self._make_request('brc20/bestheight')
+ return self._make_request("brc20/bestheight")
def get_blockchain_info(self):
- return self._make_request('blockchain/info')
+ return self._make_request("blockchain/info")
def get_block_txs(self, height):
- return self._make_request(f'block/{height}/txs')
+ return self._make_request(f"block/{height}/txs")
def get_tx_info(self, txid):
- return self._make_request(f'tx/{txid}')
+ return self._make_request(f"tx/{txid}")
def get_inscription_utxo(self, address):
- return self._make_request(f'address/{address}/inscription-utxo-data')
+ return self._make_request(f"address/{address}/inscription-utxo-data")
def get_inscription_info(self, inscriptionId):
- return self._make_request(f'inscription/info/{inscriptionId}')
+ return self._make_request(f"inscription/info/{inscriptionId}")
def get_brc20_list(self, start=0, limit=100):
- return self._make_request(f'brc20/list', {'start': start, 'limit': limit})
+ return self._make_request(f"brc20/list", {"start": start, "limit": limit})
- def get_brc20_status(self, start=0, limit=10, sort='holders', complete='yes'):
- '''
+ def get_brc20_status(self, start=0, limit=10, sort="holders", complete="yes"):
+ """
sort by (holders/deploy/minted/transactions)
- filter by (yes/no)
- '''
- return self._make_request(f'brc20/status', {'start': start, 'limit': limit, 'sort': sort, 'complete': complete})
+ filter by (yes/no)
+ """
+ return self._make_request(
+ f"brc20/status",
+ {"start": start, "limit": limit, "sort": sort, "complete": complete},
+ )
def get_brc20_ticker_info(self, ticker):
- return self._make_request(f'brc20/{ticker}/info')
+ return self._make_request(f"brc20/{ticker}/info")
def get_brc20_holders(self, ticker):
- return self._make_request(f'brc20/{ticker}/holders')
+ return self._make_request(f"brc20/{ticker}/holders")
def get_brc20_ticker_history(self, ticker, height, type, start=0, limit=100):
- '''
- type: inscribe-deploy, inscribe-mint, inscribe-transfer, transfer, send, receive
- '''
- return self._make_request(f'brc20/{ticker}/history', {'type': type, 'start': start, 'height': height, 'limit': limit})
+ """
+ type: inscribe-deploy, inscribe-mint, inscribe-transfer, transfer, send, receive
+ """
+ return self._make_request(
+ f"brc20/{ticker}/history",
+ {"type": type, "start": start, "height": height, "limit": limit},
+ )
def get_history_by_height(self, height, start=0, limit=100):
- return self._make_request(f'brc20/history-by-height/{height}', {'start': start, 'limit': limit})
+ return self._make_request(
+ f"brc20/history-by-height/{height}", {"start": start, "limit": limit}
+ )
def get_brc20_tx_history(self, ticker, txid, type, start=0, limit=100):
- return self._make_request(f'brc20/{ticker}/tx/{txid}/history', {'type': type, 'start': start, 'limit': limit})
+ return self._make_request(
+ f"brc20/{ticker}/tx/{txid}/history",
+ {"type": type, "start": start, "limit": limit},
+ )
def get_address_brc20_summary(self, address, start=0, limit=100):
- return self._make_request(f'address/{address}/brc20/summary', {'start': start, 'limit': limit})
+ return self._make_request(
+ f"address/{address}/brc20/summary", {"start": start, "limit": limit}
+ )
def get_address_brc20_summary_by_height(self, address, height, start=0, limit=100):
- return self._make_request(f'address/{address}/brc20/summary-by-height/{height}', {'start': start, 'limit': limit})
+ return self._make_request(
+ f"address/{address}/brc20/summary-by-height/{height}",
+ {"start": start, "limit": limit},
+ )
def get_address_brc20_ticker_info(self, address, ticker):
- return self._make_request(f'address/{address}/brc20/{ticker}/info')
+ return self._make_request(f"address/{address}/brc20/{ticker}/info")
def get_address_brc20_history(self, address, start=0, limit=100):
- return self._make_request(f'address/{address}/brc20/history', {'start': start, 'limit': limit})
+ return self._make_request(
+ f"address/{address}/brc20/history", {"start": start, "limit": limit}
+ )
- def get_address_brc20_history_by_ticker(self, address, ticker, type, start=0, limit=100):
- '''
+ def get_address_brc20_history_by_ticker(
+ self, address, ticker, type, start=0, limit=100
+ ):
+ """
type: inscribe-deploy, inscribe-mint, inscribe-transfer, transfer, send, receive
- '''
- return self._make_request(f'address/{address}/brc20/{ticker}/history', {'type': type, 'start': start, 'limit': limit})
+ """
+ return self._make_request(
+ f"address/{address}/brc20/{ticker}/history",
+ {"type": type, "start": start, "limit": limit},
+ )
def get_transferable_inscriptions(self, address, ticker):
- return self._make_request(f'address/{address}/brc20/{ticker}/transferable-inscriptions')
-
+ return self._make_request(
+ f"address/{address}/brc20/{ticker}/transferable-inscriptions"
+ )
+
+
def main():
unisat_api = UnisatAPI()
# print(unisat_api.get_best_block_height().json())
response = unisat_api.get_brc20_ticker_history("ordi", 826827, "inscribe-transfer")
print(response.json()["data"])
parent_directory = os.path.dirname(os.path.abspath(__file__))
- json_directory = os.path.join(parent_directory, 'json')
+ json_directory = os.path.join(parent_directory, "json")
os.makedirs(json_directory, exist_ok=True)
- json_file_path = os.path.join(json_directory, 'get_brc20_tx_history.json')
- with open(json_file_path, 'w') as file:
+ json_file_path = os.path.join(json_directory, "get_brc20_tx_history.json")
+ with open(json_file_path, "w") as file:
json.dump(response.json()["data"], file, indent=4)
+
if __name__ == "__main__":
- main()
\ No newline at end of file
+ main()
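
Functionally, the UnisatAPI class mirrors the module-level helpers above but reads its key from the UNISAT_API_KEY environment variable (loaded from .env via load_dotenv) rather than hardcoding a bearer token. A usage sketch; "ordi" is the ticker the module's own main() queries:

    # Illustrative only; requires UNISAT_API_KEY in the environment.
    from apis.unisat import UnisatAPI

    api = UnisatAPI()
    resp = api.get_brc20_ticker_info("ordi")
    if resp.status_code == 200:
        print(resp.json())
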
diff --git a/apis/utils.py b/apis/utils.py
index 3455764..63ba715 100644
--- a/apis/utils.py
+++ b/apis/utils.py
@@ -10,6 +10,7 @@
class MissingDataException(Exception):
"""Raised when the expected data is missing in an API response"""
+
pass
@@ -18,13 +19,11 @@ def convert_timestamp_to_unixtime(timestamp):
Takes a timestamp e.g. '2022-08-11T09:10:12.364Z' and
returns a unix time 1660209012.364
"""
- unix_datetime = datetime.datetime.strptime(
- timestamp, '%Y-%m-%dT%H:%M:%S.%f%z'
- )
+ unix_datetime = datetime.datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S.%f%z")
return unix_datetime.timestamp()
-def create_market_cap_database(db_path: str = 'data.db') -> None:
+def create_market_cap_database(db_path: str = "data.db") -> None:
"""
Creates a SQLite database (if not exists) to store market cap data.
@@ -45,8 +44,7 @@ def create_market_cap_database(db_path: str = 'data.db') -> None:
def store_market_cap_data(
- market_data: Dict[float, Dict[str, Any]],
- source: str, db_path: str = 'data.db'
+ market_data: Dict[float, Dict[str, Any]], source: str, db_path: str = "data.db"
) -> None:
"""
Stores market cap data into the SQLite database.
@@ -62,14 +60,13 @@ def store_market_cap_data(
cursor.execute(
"INSERT INTO market_cap_data (name, market_cap, last_updated_time, load_time, source)"
"VALUES (?, ?, ?, ?, ?)",
- (md['name'], market_cap, md['last_updated'], int(time.time()), source))
+ (md["name"], market_cap, md["last_updated"], int(time.time()), source),
+ )
conn.commit()
conn.close()
-def handle_request_errors(
- func: Callable[..., Any]
-) -> Callable[..., Optional[Any]]:
+def handle_request_errors(func: Callable[..., Any]) -> Callable[..., Optional[Any]]:
"""
Decorator function to handle request errors.
@@ -79,6 +76,7 @@ def handle_request_errors(
Returns:
Callable[..., Optional[Any]]: The decorated function.
"""
+
@wraps(func)
def wrapper(*args, **kwargs):
try:
@@ -87,6 +85,7 @@ def wrapper(*args, **kwargs):
print("Error occurred while making the API request:", str(e))
print("Warning: Continuing with the rest of the execution.")
return None
+
return wrapper
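+
+
+# Illustrative use of the decorator (hypothetical helper, not part of this
+# module): a failed request prints a warning and yields None instead of raising.
+#
+#     @handle_request_errors
+#     def fetch_json(url):
+#         response = requests.get(url, timeout=10)
+#         response.raise_for_status()
+#         return response.json()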
diff --git a/constants.py b/constants.py
index f773c56..2546a17 100644
--- a/constants.py
+++ b/constants.py
@@ -3,98 +3,111 @@
from datetime import datetime
from pathlib import Path
-DEBUG = True #show debug messages in CLI
+DEBUG = True # show debug messages in CLI
WEBSERVER_THREADS = 1
-HEADER = '\033[95m'
-OKBLUE = '\033[94m'
-OKCYAN = '\033[96m'
-OKGREEN = '\033[92m'
-WARNING = '\033[93m'
-FAIL = '\033[91m'
-ENDC = '\033[0m'
-BOLD = '\033[1m'
-UNDERLINE = '\033[4m'
-NOUNDERLINE = '\033[0m'
-
-DATA_DIR = 'data'
-TEST_DIR = 'test'
-LOGGING_FILE = 'data_feeds.db'
-DATEFORMAT = '%Y-%m-%d %H:%M:%S.%f %z'
-DATA_EXT = '.csv'
-LINE_START = '>'
-
-FEED_NAME = 'feed_name'
-DATA_POINT = 'data_point'
-TIME_STAMP = 'time_stamp'
+HEADER = "\033[95m"
+OKBLUE = "\033[94m"
+OKCYAN = "\033[96m"
+OKGREEN = "\033[92m"
+WARNING = "\033[93m"
+FAIL = "\033[91m"
+ENDC = "\033[0m"
+BOLD = "\033[1m"
+UNDERLINE = "\033[4m"
+NOUNDERLINE = "\033[0m"
+
+DATA_DIR = "data"
+TEST_DIR = "test"
+LOGGING_FILE = "data_feeds.db"
+DATEFORMAT = "%Y-%m-%d %H:%M:%S.%f %z"
+DATA_EXT = ".csv"
+LINE_START = ">"
+
+FEED_NAME = "feed_name"
+DATA_POINT = "data_point"
+TIME_STAMP = "time_stamp"
PROJECT_PATH = Path(os.path.dirname(os.path.realpath(__file__)))
DATA_PATH = PROJECT_PATH / DATA_DIR
TEST_PATH = PROJECT_PATH / TEST_DIR
LOGGING_PATH = DATA_PATH / LOGGING_FILE
-LOGGING_FORMAT = ('%(asctime)s:%(thread)d - %(name)s - %(levelname)s - %(message)s')
+LOGGING_FORMAT = "%(asctime)s:%(thread)d - %(name)s - %(levelname)s - %(message)s"
+
def start_message(feed):
- return f'\n{HEADER}Starting {UNDERLINE}{feed.NAME}{NOUNDERLINE} {HEADER}data feed!{ENDC}'
+ return f"\n{HEADER}Starting {UNDERLINE}{feed.NAME}{NOUNDERLINE} {HEADER}data feed!{ENDC}"
+
def stop_message(feed):
- return f'\n{OKCYAN}Shutting down {UNDERLINE}{feed.NAME}{NOUNDERLINE} {OKCYAN}...{ENDC}'
+ return (
+ f"\n{OKCYAN}Shutting down {UNDERLINE}{feed.NAME}{NOUNDERLINE} {OKCYAN}...{ENDC}"
+ )
+
def init_time_message(cls):
- return f'\nSiwa init time: {datetime.fromtimestamp(cls.init_time).strftime(DATEFORMAT)}'
+ return f"\nSiwa init time: {datetime.fromtimestamp(cls.init_time).strftime(DATEFORMAT)}"
+
get_color = lambda x: OKGREEN if x else FAIL
-get_word = lambda x: '' if x else 'not '
+get_word = lambda x: "" if x else "not "
+
def get_starttime_string(feed):
- if (hasattr(feed, 'START_TIME') and feed.START_TIME):
- #ensure START_TIME exists and is not None before trying to convert
+ if hasattr(feed, "START_TIME") and feed.START_TIME:
+ # ensure START_TIME exists and is not None before trying to convert
dt_from_timestamp = datetime.fromtimestamp(feed.START_TIME)
human_readable_time_str = dt_from_timestamp.strftime(DATEFORMAT)
- return f'{human_readable_time_str}'
+ return f"{human_readable_time_str}"
else:
- #if START_TIME not initialized (i.e. because "start gauss" not issued yet)
- #(e.g. when calling `status` command before having ever started a feed)
- return f'[NEVER]'
+ # if START_TIME not initialized (i.e. because "start gauss" not issued yet)
+ # (e.g. when calling `status` command before having ever started a feed)
+ return f"[NEVER]"
+
def status_message(feed):
x = feed.ACTIVE
- return f'{get_color(x)}{feed.NAME}{ENDC} with id {feed.ID} is {get_word(x)}active, with {feed.COUNT} data points served since {get_starttime_string(feed)}'
+ return f"{get_color(x)}{feed.NAME}{ENDC} with id {feed.ID} is {get_word(x)}active, with {feed.COUNT} data points served since {get_starttime_string(feed)}"
+
-#################### COINGECKo####################
+#################### COINGECKO ####################
-PRICE = 'current_price'
+PRICE = "current_price"
-#ids
-USDC = 'usd-coin'
-BUSD = 'binance-usd'
-TETHER = 'tether'
-DAI = 'dai'
-DOGE = 'dogecoin'
-SHIBA = 'shiba-inu'
-BABYDOGE = 'baby-doge-coin'
-DOGELON = 'dogelon-mars'
-SHIBASWAP = 'bone-shibaswap'
+# ids
+USDC = "usd-coin"
+BUSD = "binance-usd"
+TETHER = "tether"
+DAI = "dai"
+DOGE = "dogecoin"
+SHIBA = "shiba-inu"
+BABYDOGE = "baby-doge-coin"
+DOGELON = "dogelon-mars"
+SHIBASWAP = "bone-shibaswap"
#################### WEB3 ####################
-#CHAINS
-ARBITRUM_GOERLI = 'arbitrum_goerli'
-ARBITRUM_MAINNET = 'arbitrum_mainnet'
-ETHEREUM_MAINNET = 'ethereum_mainnet'
+# CHAINS
+ARBITRUM_GOERLI = "arbitrum_goerli"
+ARBITRUM_MAINNET = "arbitrum_mainnet"
+ETHEREUM_MAINNET = "ethereum_mainnet"
# POKT
-POKT_PORTAL_ID = 'a609ace3fe0c00927e127927'
-POKT_ARBITRUM = 'arbitrum-one'
-POKT_ETHEREUM = 'eth-mainnet'
+POKT_PORTAL_ID = "a609ace3fe0c00927e127927"
+POKT_ARBITRUM = "arbitrum-one"
+POKT_ETHEREUM = "eth-mainnet"
# RPC Endpoints
-ARBITRUM_GOERLI_RPC = 'https://goerli-rollup.arbitrum.io/rpc'
-ARBITRUM_MAINNET_RPC = f'https://{POKT_ARBITRUM}.gateway.pokt.network/v1/lb/{POKT_PORTAL_ID}'
-ETHEREUM_MAINNET_RPC = f'https://{POKT_ETHEREUM}.gateway.pokt.network/v1/lb/{POKT_PORTAL_ID}'
+ARBITRUM_GOERLI_RPC = "https://goerli-rollup.arbitrum.io/rpc"
+ARBITRUM_MAINNET_RPC = (
+ f"https://{POKT_ARBITRUM}.gateway.pokt.network/v1/lb/{POKT_PORTAL_ID}"
+)
+ETHEREUM_MAINNET_RPC = (
+ f"https://{POKT_ETHEREUM}.gateway.pokt.network/v1/lb/{POKT_PORTAL_ID}"
+)
# Addresses:
-TRANSLUCENT_GAUSS_ARBITRUM_GOERLI = '0xB39AC20b8b0C840a863ceB58A29b597022d98Bf5'#'0x77f85f243dCd2A69F20c2A98F1ef993DC4492A51'
+TRANSLUCENT_GAUSS_ARBITRUM_GOERLI = "0xB39AC20b8b0C840a863ceB58A29b597022d98Bf5"  # previously '0x77f85f243dCd2A69F20c2A98F1ef993DC4492A51'
# TRANSLUCENT_GAUSS_ARBITRUM_MAINNET = None
# ABIs below
diff --git a/exchanges/__init__.py b/exchanges/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/exchanges/constants/__init__.py b/exchanges/constants/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/exchanges/constants/urls.py b/exchanges/constants/urls.py
new file mode 100644
index 0000000..f0627c6
--- /dev/null
+++ b/exchanges/constants/urls.py
@@ -0,0 +1,6 @@
+BINANCE_API_OPTIONS_URL = "https://eapi.binance.com"
+BINANCE_API_FUTURES_URL = "https://fapi.binance.com"
+BINANCE_API_SPOT_URL = "https://api.binance.com"
+BYBIT_API_URL = "https://api.bybit.com/v2/public/symbols"
+OKX_API_URL = "https://www.okex.com/api/spot/v3/instruments"
+KRAKEN_API_URL = "https://api.kraken.com/0/public/AssetPairs"
diff --git a/exchanges/constants/utils.py b/exchanges/constants/utils.py
new file mode 100644
index 0000000..fd16dc9
--- /dev/null
+++ b/exchanges/constants/utils.py
@@ -0,0 +1,5 @@
+DEBUG_LIMIT = None # None or int value
+SPREAD_MULTIPLIER = 10
+SPREAD_MIN = 0.0005
+RANGE_MULT = 2.5
+INDEX_MATURITY = 30 / 365
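+
+# How these are used (see exchanges/processing.py): quotes are kept when the
+# spread is within GMS = SPREAD_MIN * SPREAD_MULTIPLIER or within MAS (the
+# smaller of bid/ask spread times SPREAD_MULTIPLIER), and strikes outside
+# [Fimp / RANGE_MULT, Fimp * RANGE_MULT] are discarded.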
diff --git a/exchanges/exchange_manager.py b/exchanges/exchange_manager.py
new file mode 100644
index 0000000..da5d8c9
--- /dev/null
+++ b/exchanges/exchange_manager.py
@@ -0,0 +1,77 @@
+import ccxt
+import logging
+
+import pandas as pd
+from pandas import DataFrame
+
+from exchanges.fetchers.binance_fetcher import BinanceFetcher
+from exchanges.handlers.future_and_options_handler import MergeMarketHandler
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+class ExchangeManager:
+ def __init__(self, exchange_id, pairs_to_load, market_types):
+ self.exchange_id = exchange_id
+ self.pairs_to_load = pairs_to_load
+ self.market_types = market_types
+ self.exchange = getattr(ccxt, exchange_id)()
+ self.binance_fetcher = BinanceFetcher()
+ self.merge_market_handler = MergeMarketHandler(self.exchange, market_types)
+ self.options_data = pd.DataFrame()
+ self.futures_data = pd.DataFrame()
+
+ def fetch_binance_symbols(self):
+ binance_option_symbols = self.binance_fetcher.fetch_options_symbols()
+ return binance_option_symbols
+
+    def load_specific_pairs(self) -> tuple[DataFrame, DataFrame] | DataFrame | None:
+ try:
+ if self.exchange_id == "binance":
+ binance_option_symbols = self.fetch_binance_symbols()
+ data = {
+ "BTC/USD:BTC": {
+ "option": binance_option_symbols,
+ "future": None,
+ }
+ }
+ return self.handle_market_type(data)
+
+ all_markets = self.exchange.load_markets()
+ markets_df = pd.DataFrame(all_markets).T
+ filtered_markets = self.filter_markets(markets_df)
+ return self.handle_market_type(filtered_markets)
+ except Exception as e:
+ logger.error(f"Error loading specific pairs: {e}")
+ return pd.DataFrame()
+
+ def filter_markets(self, markets_df: pd.DataFrame) -> dict:
+ filtered_markets = {}
+ for pair in self.pairs_to_load:
+ base, quote = pair.split(":")[0].split("/")
+ for market_type in self.market_types:
+ filtered_df = markets_df[
+ (markets_df["base"] == base)
+ & (markets_df["quote"] == quote)
+ & (markets_df["type"] == market_type)
+ ]
+ symbols = filtered_df["symbol"].tolist()
+ if pair not in filtered_markets:
+ filtered_markets[pair] = {}
+ filtered_markets[pair][market_type] = symbols
+ return filtered_markets
+
+ def handle_market_type(
+ self, loaded_markets: dict
+ ) -> tuple[DataFrame, DataFrame] | None:
+ dataframe = None
+ for pair in self.pairs_to_load:
+ future_symbols = loaded_markets.get(pair, {}).get("future", [])
+ option_symbols = loaded_markets.get(pair, {}).get("option", [])
+ dataframe = self.merge_market_handler.handle(option_symbols, future_symbols)
+
+ return dataframe
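+
+
+# Usage sketch (network access and valid ccxt markets assumed); note that
+# handle_market_type returns the frames for the last pair in pairs_to_load:
+#
+#     manager = ExchangeManager(
+#         exchange_id="deribit",
+#         pairs_to_load=["BTC/USD:BTC"],
+#         market_types=["option", "future"],
+#     )
+#     options_df, futures_df = manager.load_specific_pairs()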
diff --git a/exchanges/fetchers/binance_fetcher.py b/exchanges/fetchers/binance_fetcher.py
new file mode 100644
index 0000000..f96a4f9
--- /dev/null
+++ b/exchanges/fetchers/binance_fetcher.py
@@ -0,0 +1,118 @@
+import pandas as pd
+
+from exchanges.constants.urls import (
+ BINANCE_API_OPTIONS_URL,
+ BINANCE_API_FUTURES_URL,
+ BINANCE_API_SPOT_URL,
+)
+
+import logging
+import requests
+
+# Assuming BINANCE_API_OPTIONS_URL and BINANCE_API_FUTURES_URL are defined elsewhere
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+class BinanceFetcher:
+ @staticmethod
+ def get_response(url):
+ try:
+ with requests.Session() as session:
+ response = session.get(url)
+ if response.status_code == 200:
+ return response.json()
+ else:
+ logger.error(
+ f"Failed to fetch data from {url}: {response.status_code}"
+ )
+ return None
+ except Exception as e:
+ logger.error(f"Exception occurred while fetching data from {url}: {e}")
+ return None
+
+ @staticmethod
+    def fetch_options_symbols():
+        data = BinanceFetcher.get_response(
+            BINANCE_API_OPTIONS_URL + "/eapi/v1/exchangeInfo"
+        )
+        if not data:
+            # get_response has already logged the failure
+            return []
+        data_df = pd.DataFrame(data["optionSymbols"])
+        # all symbols with BTC-
+        symbols = data_df["symbol"].loc[data_df["symbol"].str.contains("BTC-")]
+        return symbols.tolist()
+
+ @staticmethod
+ def fetch_futures_symbols():
+ url = f"{BINANCE_API_FUTURES_URL}/fapi/v1/premiumIndex"
+ data = BinanceFetcher.get_response(url)
+ if data:
+ return [
+ res["symbol"] for res in data if "BTCUSDT_" in res.get("symbol", "")
+ ]
+ return []
+
+ @staticmethod
+ def fetch_mark_price_futures():
+ symbols = BinanceFetcher.fetch_futures_symbols()
+ mark_prices = [] # This will hold dictionaries
+ for symbol in symbols:
+ data = BinanceFetcher.get_response(
+ BINANCE_API_FUTURES_URL + f"/fapi/v1/depth?symbol={symbol}"
+ )
+
+ bids_df = pd.DataFrame(data["bids"], columns=["price", "quantity"]).astype(
+ {"price": "float"}
+ )
+ asks_df = pd.DataFrame(data["asks"], columns=["price", "quantity"]).astype(
+ {"price": "float"}
+ )
+
+ # Get maximum bid and minimum ask
+ best_bid = bids_df["price"].max()
+ best_ask = asks_df["price"].min()
+
+ forward_price = (best_bid + best_ask) / 2
+ expiry = symbol.split("_")[1]
+
+ mark_prices.append(
+ {
+ "symbol": symbol,
+ "forward_price": forward_price,
+ "expiry": expiry,
+ }
+ )
+
+ mark_prices_df = pd.DataFrame(mark_prices)
+ return mark_prices_df
+
+ @staticmethod
+ def fetch_mark_price_options():
+ mark_prices_options = BinanceFetcher.get_response(
+ BINANCE_API_OPTIONS_URL + "/eapi/v1/mark"
+ )
+ mark_prices_options_df = pd.DataFrame(mark_prices_options)
+ mark_prices_options_df = mark_prices_options_df.loc[
+ mark_prices_options_df["symbol"].str.contains("BTC-")
+ ]
+
+ return mark_prices_options_df
+
+ @staticmethod
+ def fetch_spot_price(symbol: str = "BTCUSDT"):
+ spot_price = BinanceFetcher.get_response(
+ BINANCE_API_SPOT_URL + f"/api/v3/ticker/price?symbol={symbol}"
+ )
+ return float(spot_price["price"])
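+
+
+if __name__ == "__main__":
+    # Smoke-test sketch; hits live Binance endpoints, outputs are examples only.
+    print(BinanceFetcher.fetch_options_symbols()[:5])  # e.g. ["BTC-240329-20000-C", ...]
+    print(BinanceFetcher.fetch_spot_price())  # e.g. 43000.0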
diff --git a/exchanges/fetchers/future_fetcher.py b/exchanges/fetchers/future_fetcher.py
new file mode 100644
index 0000000..3285514
--- /dev/null
+++ b/exchanges/fetchers/future_fetcher.py
@@ -0,0 +1,79 @@
+from datetime import datetime
+import ccxt
+import numpy as np
+import pandas as pd
+
+
+class FutureFetcher:
+ def __init__(self, exchange):
+ self.exchange = exchange
+
+ def fetch_future_market_symbols(self, symbol: str) -> list[str]:
+ load_markets = self.exchange.load_markets()
+ load_markets_df = pd.DataFrame(load_markets).transpose()
+ future_symbols = load_markets_df[
+ (load_markets_df["future"] == True)
+ & (load_markets_df["symbol"].str.contains(f"{symbol}/USD"))
+ & (load_markets_df["symbol"].str.contains(f":{symbol}"))
+ ].index.to_list()
+ return future_symbols
+
+ def fetch_future_orderbook(self, symbol: str) -> dict:
+ order_book = self.exchange.fetch_order_book(symbol)
+ bids_df = pd.DataFrame(
+ order_book["bids"], columns=["price", "quantity"]
+ ).astype({"price": "float"})
+ asks_df = pd.DataFrame(
+ order_book["asks"], columns=["price", "quantity"]
+ ).astype({"price": "float"})
+ best_bid = bids_df["price"].max()
+ best_ask = asks_df["price"].min()
+
+ forward_price = (best_bid + best_ask) / 2
+ expiry = symbol.split("-")[1]
+ return {
+ "symbol": symbol,
+ "forward_price": forward_price,
+ "expiry": expiry,
+ }
+
+ def fetch_spot_price(self, symbol: str = "BTC/USDT"):
+ ticker = self.exchange.fetch_ticker(symbol)
+ return ticker["last"]
+
+ def fetch_implied_interest_rate(self, symbol: str) -> dict:
+ orderbook = self.fetch_future_orderbook(symbol)
+ forward_price = orderbook["forward_price"]
+ expiry_str = orderbook["expiry"]
+
+ expiry_date = datetime.strptime(expiry_str, "%y%m%d") # Corrected format here
+ today = datetime.now()
+ days_to_expiry = (expiry_date - today).days
+ years_to_expiry = days_to_expiry / 365.25
+
+ spot_price = self.fetch_spot_price()
+
+ if years_to_expiry == 0:
+ implied_interest_rate = 0
+ else:
+ implied_interest_rate = (
+ np.log(forward_price) - np.log(spot_price)
+ ) / years_to_expiry
+
+ return {
+ "symbol": symbol,
+ "expiry": expiry_str,
+ "implied_interest_rate": implied_interest_rate,
+ "days_to_expiry": days_to_expiry,
+ "years_to_expiry": years_to_expiry,
+ }
+
+ def fetch_all_implied_interest_rates(self, symbols: list[str]) -> pd.DataFrame:
+ data = [self.fetch_implied_interest_rate(symbol) for symbol in symbols]
+ rates_data = pd.DataFrame(data)
+
+ rates_data["expiry"] = pd.to_datetime(rates_data["expiry"], format="%y%m%d")
+ # expiry in human readable format
+ rates_data["expiry"] = rates_data["expiry"].dt.strftime("%Y-%m-%d")
+
+ return rates_data
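+
+
+# Usage sketch (assumes a ccxt exchange listing BTC futures, e.g. Deribit):
+#
+#     fetcher = FutureFetcher(ccxt.deribit())
+#     symbols = fetcher.fetch_future_market_symbols("BTC")
+#     rates = fetcher.fetch_all_implied_interest_rates(symbols)
+#
+# The implied rate is the continuously-compounded carry backed out of the
+# forward: r = (ln F - ln S) / T, with F the futures mid and S the spot price.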
diff --git a/exchanges/fetchers/option_fetcher.py b/exchanges/fetchers/option_fetcher.py
new file mode 100644
index 0000000..1e998b5
--- /dev/null
+++ b/exchanges/fetchers/option_fetcher.py
@@ -0,0 +1,161 @@
+import logging
+
+import ccxt
+import pandas as pd
+import requests
+
+from exchanges.fetchers.binance_fetcher import BinanceFetcher
+
+
+class OptionFetcher:
+ def __init__(self, exchange):
+ self.exchange = exchange
+ self.binance_fetcher = BinanceFetcher()
+
+ def fetch_market_data(
+ self, market_symbols: list[str], exchange_name: str
+ ) -> pd.DataFrame:
+ """
+ Fetches market data for a given list of market symbols from a specified exchange and processes it using pandas.
+ Args:
+ market_symbols: A list of symbols in the format recognized by the exchange.
+ exchange_name: String representing the exchange name ('deribit', 'okx', 'binance').
+ Returns:
+ pd.DataFrame: DataFrame with processed market data for each option contract.
+ """
+ try:
+ all_tickers = self.exchange.fetch_tickers(market_symbols)
+ tickers_df = pd.DataFrame(all_tickers).transpose()
+ if exchange_name == "Deribit":
+ return self.process_deribit_data(tickers_df)
+ elif exchange_name == "OKX":
+ return self.process_okx_data(tickers_df)
+ elif exchange_name == "Binance":
+ return self.process_binance_data(tickers_df)
+ else:
+ logging.error(f"Unsupported exchange: {exchange_name}")
+ return pd.DataFrame()
+ except Exception as e:
+ logging.error(f"Error fetching tickers from {exchange_name}: {e}")
+ return pd.DataFrame()
+
+ def process_deribit_data(self, df: pd.DataFrame) -> pd.DataFrame:
+ info_df = pd.json_normalize(df["info"])
+ df = df.reset_index(drop=True)
+
+ df["bid"] = pd.to_numeric(df["bid"], errors="coerce").fillna(0.0)
+ df["ask"] = pd.to_numeric(df["ask"], errors="coerce").fillna(0.0)
+
+ df["mark_price"] = pd.to_numeric(info_df["mark_price"], errors="coerce").fillna(
+ 0.0
+ )
+
+ underlying_prices = pd.to_numeric(
+ info_df["underlying_price"], errors="coerce"
+ ).fillna(0.0)
+
+ df["bid"] *= underlying_prices
+ df["ask"] *= underlying_prices
+ df["mark_price"] *= underlying_prices
+
+ return df[
+ [
+ "symbol",
+ "bid",
+ "ask",
+ "mark_price",
+ ]
+ ]
+
+ def process_okx_data(self, df: pd.DataFrame) -> pd.DataFrame:
+ response = requests.get(
+ "https://www.okx.com/api/v5/public/mark-price?instType=OPTION"
+ )
+ mark_prices = response.json()["data"]
+ mark_prices_df = pd.DataFrame(mark_prices)
+ mark_prices_df["symbol"] = mark_prices_df["instId"].apply(
+ self.convert_inst_id_to_symbol
+ )
+ mark_prices_df.rename(columns={"markPx": "mark_price"}, inplace=True)
+ df["underlying_price"] = self.exchange.fetch_ticker("BTC/USDT")["last"]
+ df["bid"] *= df["underlying_price"]
+ df["ask"] *= df["underlying_price"]
+
+ df = df.merge(mark_prices_df[["symbol", "mark_price"]], on="symbol", how="left")
+ df["mark_price"] = pd.to_numeric(df["mark_price"], errors="coerce").fillna(0.0)
+ df["mark_price"] *= df["underlying_price"]
+
+ return df[
+ [
+ "symbol",
+ "bid",
+ "ask",
+ "mark_price",
+ ]
+ ]
+
+ def process_binance_data(self, df: pd.DataFrame) -> pd.DataFrame:
+ df["symbol"] = df["symbol"].apply(self.convert_usdt_to_usd)
+ df["bid"] = df["info"].apply(lambda x: float(x.get("bidPrice", 0)))
+ df["ask"] = df["info"].apply(lambda x: float(x.get("askPrice", 0)))
+
+ mark_price = self.binance_fetcher.fetch_mark_price_options()
+ mark_price["symbol"] = mark_price["symbol"].apply(self.transform_symbol_format)
+ mark_price.rename(columns={"markPrice": "mark_price"}, inplace=True)
+ mark_price["mark_price"] = pd.to_numeric(
+ mark_price["mark_price"], errors="coerce"
+ ).fillna(0.0)
+
+ df = df.merge(mark_price, on="symbol", how="left")
+
+ return df[
+ [
+ "symbol",
+ "bid",
+ "ask",
+ "mark_price",
+ ]
+ ]
+
+ @staticmethod
+ def convert_inst_id_to_symbol(inst_id: str) -> str:
+ parts = inst_id.split("-")
+ currency = f"{parts[0]}/{parts[1]}" # e.g., BTC/USD
+        date = parts[2]  # already in YYMMDD form
+ strike_price = parts[3]
+ option_type = parts[4]
+
+ symbol = f"{currency}:{parts[0]}-{date}-{strike_price}-{option_type}"
+ return symbol
+
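+    # Example (hypothetical instrument): "BTC-USD-240329-15000-P" maps to
+    # "BTC/USD:BTC-240329-15000-P", the unified option symbol format used by ccxt.
+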
+ @staticmethod
+ def transform_symbol_format(symbol):
+ parts = symbol.split("-")
+ return f"{parts[0]}/USD:USD-{parts[1]}-{parts[2]}-{parts[3]}"
+
+ @staticmethod
+ def convert_usdt_to_usd(symbol: str) -> str:
+ parts = symbol.split(":")
+ converted_parts = [part.replace("USDT", "USD") for part in parts]
+ converted_symbol = ":".join(converted_parts)
+ return converted_symbol
+
+ @staticmethod
+ def get_strike_price_and_option_type(symbol: str) -> tuple[str, str]:
+ parts = symbol.split("-")
+ strike_price = parts[-2]
+ option_type = parts[-1]
+ return strike_price, option_type
+
+
+if __name__ == "__main__":
+ exchange = ccxt.okx()
+ option_fetcher = OptionFetcher(exchange)
+ market_symbols = ["BTC-240329-15000-P", "BTC-240329-20000-C"]
+ exchange_name = "OKX"
+ option_fetcher.fetch_market_data(market_symbols, exchange_name)
+ print("Market data fetched and processed successfully!")
diff --git a/exchanges/filtering.py b/exchanges/filtering.py
new file mode 100644
index 0000000..2a246c6
--- /dev/null
+++ b/exchanges/filtering.py
@@ -0,0 +1,269 @@
+class Filtering:
+    def __init__(self, options_data, RANGE_MULT=2.5, minimum_bid_threshold=0.0005):
+        # Defaults mirror RANGE_MULT and SPREAD_MIN in exchanges/constants/utils.py.
+        self.options_data = options_data
+        self.RANGE_MULT = RANGE_MULT
+        self.minimum_bid_threshold = minimum_bid_threshold
+        self.Fimp = None
+        self.K_ATM = None
+
+ def calculate_Fimp(self, call_data, put_data):
+ """
+ Calculate the implied forward price based on call and put option data.
+
+ Parameters:
+ call_data (list): List of call option data.
+ put_data (list): List of put option data.
+
+ Returns:
+ float: Implied forward price.
+ """
+ if not call_data or not put_data:
+ return 0
+
+ implied_forward_price = None
+
+ for call_option in call_data:
+ call_price = call_option.get("mid_price")
+ if call_price is None:
+ continue
+
+ min_price_diff = float("inf")
+ selected_put_option = None
+
+ for put_option in put_data:
+ put_price = put_option.get("mid_price")
+ if put_price is None:
+ continue
+
+ price_diff = abs(call_price - put_price)
+ if price_diff < min_price_diff:
+ min_price_diff = price_diff
+ selected_put_option = put_option
+
+            if selected_put_option is not None:
+                put_price = selected_put_option.get("mid_price")
+                strike_price = self.find_min_difference_strike(call_data, put_data)
+                forward_price = self.calculate_forward_price(
+                    call_option, selected_put_option
+                )
+                implied_forward_price = strike_price + forward_price * (
+                    call_price - put_price
+                )
+                break
+
+ return implied_forward_price if implied_forward_price is not None else 0
+
+ def extract_strike_price(self, option_symbol):
+ """
+ Extract the strike price from the option symbol.
+
+ Parameters:
+ option_symbol (str): Symbol representing the option.
+
+ Returns:
+ float: Strike price.
+ """
+ symbol_parts = option_symbol.split("-")
+ if len(symbol_parts) < 3:
+ return 0
+
+ strike_price_str = symbol_parts[-2]
+
+ try:
+ strike_price = float(strike_price_str)
+ return strike_price
+ except ValueError:
+ return 0
+
+ def calculate_forward_price(self, call_option, put_option):
+ """
+ Calculate the forward price based on call and put option data.
+
+ Parameters:
+ call_option (dict): Call option data.
+ put_option (dict): Put option data.
+
+ Returns:
+ float: Forward price.
+ """
+ call_price = call_option.get("mark_price")
+ put_price = put_option.get("mark_price")
+
+ if call_price is None or put_price is None:
+ return 0
+
+ average_mark_price = (call_price + put_price) / 2
+ return average_mark_price
+
+ def find_min_difference_strike(self, call_data, put_data):
+ """
+ Find the strike price with the minimum difference in mid prices between call and put options.
+
+ Parameters:
+ call_data (list): List of call option data.
+ put_data (list): List of put option data.
+
+ Returns:
+ float: Strike price with the minimum price difference.
+ """
+ min_diff_strike = None
+ min_price_diff = float("inf")
+
+ for call_option in call_data:
+ call_price = call_option.get("mid_price")
+ if call_price is None:
+ continue
+
+ for put_option in put_data:
+ put_price = put_option.get("mid_price")
+ if put_price is None:
+ continue
+
+ price_diff = abs(call_price - put_price)
+ if price_diff < min_price_diff:
+ min_price_diff = price_diff
+ min_diff_strike = self.extract_strike_price(
+ call_option.get("symbol")
+ )
+
+ return min_diff_strike
+
+ def set_ATM_strike(self, call_data, put_data):
+ """
+ Find the strike price with the minimum difference in mid prices between call and put options.
+
+ Parameters:
+ call_data (list): List of call option data.
+ put_data (list): List of put option data.
+
+ Returns:
+ float: Strike price with the minimum price difference.
+ """
+ # Extract strike prices from options_data
+ Fimp = self.calculate_Fimp(call_data, put_data)
+
+ # Find the strikes less than Fimp
+ option_strikes = [
+ self.extract_strike_price(option["symbol"]) for option in self.options_data
+ ]
+ strikes_less_than_Fimp = [strike for strike in option_strikes if strike < Fimp]
+
+ # Set the largest strike that is less than Fimp as ATM strike K_ATM for near and next-term options
+ self.K_ATM = max(strikes_less_than_Fimp)
+
+ return self.K_ATM
+
+ def select_OTM_options(self, call_data, put_data):
+ """
+ Select out-of-the-money (OTM) options with respect to the ATM strike price for each set of near and next-term options.
+ If both call and put options are selected for the same strike (i.e., K_ATM), then take the average of them.
+
+ Parameters:
+ call_data (list): List of call option data.
+ put_data (list): List of put option data.
+
+ Returns:
+ dict: Dictionary containing OTM options for each set of near and next-term options.
+ """
+ # Calculate K_ATM
+ K_ATM = self.set_ATM_strike(call_data, put_data)
+
+ # Filter strikes less than K_ATM
+ option_strikes = [
+ self.extract_strike_price(option["symbol"])
+ for option in call_data + put_data
+ ]
+ strikes_less_than_K_ATM = [
+ strike for strike in option_strikes if strike < K_ATM
+ ]
+
+ # Initialize a dictionary to store OTM options
+ OTM_options = {}
+
+ # Select OTM options for each set of near and next-term options
+ for strike in strikes_less_than_K_ATM:
+ if strike != K_ATM:
+ # For strikes different from K_ATM, select OTM options based on mid-price
+ call_OTM = next(
+ (
+ call
+ for call in call_data
+ if self.extract_strike_price(call["symbol"]) == strike
+ ),
+ None,
+ )
+ put_OTM = next(
+ (
+ put
+ for put in put_data
+ if self.extract_strike_price(put["symbol"]) == strike
+ ),
+ None,
+ )
+
+ if call_OTM and put_OTM:
+ call_price = call_OTM.get("mid_price", 0)
+ put_price = put_OTM.get("mid_price", 0)
+ OTM_options[strike] = (call_price + put_price) / 2
+ else:
+ # For K_ATM, take the average of both call and put prices if they exist
+ call_ATM = next(
+ (
+ call
+ for call in call_data
+ if self.extract_strike_price(call["symbol"]) == K_ATM
+ ),
+ None,
+ )
+ put_ATM = next(
+ (
+ put
+ for put in put_data
+ if self.extract_strike_price(put["symbol"]) == K_ATM
+ ),
+ None,
+ )
+
+ if call_ATM and put_ATM:
+ call_price = call_ATM.get("mid_price", 0)
+ put_price = put_ATM.get("mid_price", 0)
+ OTM_options[K_ATM] = (call_price + put_price) / 2
+
+ return OTM_options
+
+ def filter_options_by_strike_range(self):
+ # Calculate Kmin and Kmax
+ Kmin = self.Fimp / self.RANGE_MULT
+ Kmax = self.Fimp * self.RANGE_MULT
+
+ # Filter options within the strike range
+ filtered_options = [
+ option for option in self.options_data if Kmin < option["strike"] < Kmax
+ ]
+ return filtered_options
+
+ def filter_options_by_bid_price(self, options):
+ # Eliminate options after observing five consecutive bid prices below the threshold
+ filtered_options = []
+ consecutive_below_threshold = 0
+
+ for option in sorted(options, key=lambda x: x["strike"]):
+ if option["bid_price"] > self.minimum_bid_threshold:
+ filtered_options.append(option)
+ consecutive_below_threshold = 0
+ else:
+ consecutive_below_threshold += 1
+ if consecutive_below_threshold < 5:
+ filtered_options.append(option)
+ else:
+ break # Stop adding options once we hit five below the threshold
+
+ return filtered_options
+
+    def execute(self, call_data, put_data):
+        self.Fimp = self.calculate_Fimp(call_data, put_data)
+        self.set_ATM_strike(call_data, put_data)
+        otm_options = self.select_OTM_options(call_data, put_data)
+        strike_filtered_options = self.filter_options_by_strike_range()
+        final_filtered_options = self.filter_options_by_bid_price(
+            strike_filtered_options
+        )
+        return final_filtered_options
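+
+
+# Usage sketch (hypothetical symbol; real quotes come from the fetchers):
+#
+#     filtering = Filtering(options_data=[])
+#     filtering.extract_strike_price("BTC-240329-40000-C")  # -> 40000.0
+#
+# The full pipeline is driven by execute(call_data, put_data) once call and
+# put quote dictionaries (with "symbol", "mid_price", "mark_price", "strike"
+# and "bid_price" keys) are available.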
diff --git a/exchanges/handlers/__init__.py b/exchanges/handlers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/exchanges/handlers/future_and_options_handler.py b/exchanges/handlers/future_and_options_handler.py
new file mode 100644
index 0000000..6773e92
--- /dev/null
+++ b/exchanges/handlers/future_and_options_handler.py
@@ -0,0 +1,37 @@
+from typing import List
+
+import pandas as pd
+
+from exchanges.fetchers.future_fetcher import FutureFetcher
+from exchanges.fetchers.option_fetcher import OptionFetcher
+from exchanges.processing import Processing
+
+
+class MergeMarketHandler:
+ def __init__(self, exchange, market_types):
+ self.exchange = exchange
+ self.future_data_fetcher = FutureFetcher(exchange)
+ self.options_data_fetcher = OptionFetcher(exchange)
+ self.market_types = market_types
+ self.processing = Processing()
+
+    def handle(
+        self, options_market: List[str], future_market: List[str] | None = None
+    ) -> tuple[pd.DataFrame, pd.DataFrame]:
+        """
+        Fetches the option and future market data for the given exchange.
+
+        Option quotes are fetched first and stripped of invalid quotes; the
+        implied interest rates are then computed for the future symbols
+        (those passed in by the caller, or all BTC futures as a fallback).
+        """
+        options_data = self.options_data_fetcher.fetch_market_data(
+            options_market, str(self.exchange)
+        )
+        options_data = self.processing.eliminate_invalid_quotes(options_data)
+
+        futures_symbols = (
+            future_market
+            or self.future_data_fetcher.fetch_future_market_symbols("BTC")
+        )
+        future_data = self.future_data_fetcher.fetch_all_implied_interest_rates(
+            futures_symbols
+        )
+
+        return options_data, future_data
diff --git a/exchanges/main.py b/exchanges/main.py
new file mode 100644
index 0000000..e9e097f
--- /dev/null
+++ b/exchanges/main.py
@@ -0,0 +1,36 @@
+import logging
+
+import pandas as pd
+
+from exchanges.managers.binance_manager import BinanceManager
+from exchanges.managers.deribit_manager import DeribitManager
+from exchanges.managers.okx_manager import OKXManager
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+def main():
+ try:
+ deribit = DeribitManager(pairs_to_load=["BTC/USD:BTC"], market_types=["option"])
+ binance = BinanceManager(
+ pairs_to_load=["BTC/USD:BTC"], market_types=["option", "future"]
+ )
+ okx = OKXManager(pairs_to_load=["BTC/USD:BTC"], market_types=["option"])
+ global_orderbook_options = pd.DataFrame()
+ global_orderbook_futures = pd.DataFrame()
+
+ for manager in [binance, deribit]:
+ options, futures = manager.load_specific_pairs()
+            global_orderbook_options = pd.concat(
+                [global_orderbook_options, options]
+            ).reset_index(drop=True)
+            global_orderbook_futures = pd.concat(
+                [global_orderbook_futures, futures]
+            ).reset_index(drop=True)
+
+ except Exception as e:
+ logger.error(f"An unexpected error occurred in the main function: {e}")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/exchanges/managers/__init__.py b/exchanges/managers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/exchanges/managers/__pycache__/binance_manager.cpython-311.pyc b/exchanges/managers/__pycache__/binance_manager.cpython-311.pyc
new file mode 100644
index 0000000..3e7271b
Binary files /dev/null and b/exchanges/managers/__pycache__/binance_manager.cpython-311.pyc differ
diff --git a/exchanges/managers/binance_manager.py b/exchanges/managers/binance_manager.py
new file mode 100644
index 0000000..dd3f2af
--- /dev/null
+++ b/exchanges/managers/binance_manager.py
@@ -0,0 +1,6 @@
+from exchanges.exchange_manager import ExchangeManager
+
+
+class BinanceManager(ExchangeManager):
+ def __init__(self, pairs_to_load, market_types):
+ super().__init__("binance", pairs_to_load, market_types)
diff --git a/exchanges/managers/bybit_manager.py b/exchanges/managers/bybit_manager.py
new file mode 100644
index 0000000..07b7e8f
--- /dev/null
+++ b/exchanges/managers/bybit_manager.py
@@ -0,0 +1,6 @@
+from exchanges.exchange_manager import ExchangeManager
+
+
+class BybitManager(ExchangeManager):
+ def __init__(self, pairs_to_load, market_types):
+ super().__init__("bybit", pairs_to_load, market_types)
diff --git a/exchanges/managers/deribit_manager.py b/exchanges/managers/deribit_manager.py
new file mode 100644
index 0000000..42cb655
--- /dev/null
+++ b/exchanges/managers/deribit_manager.py
@@ -0,0 +1,6 @@
+from exchanges.exchange_manager import ExchangeManager
+
+
+class DeribitManager(ExchangeManager):
+ def __init__(self, pairs_to_load, market_types):
+ super().__init__("deribit", pairs_to_load, market_types)
diff --git a/exchanges/managers/okx_manager.py b/exchanges/managers/okx_manager.py
new file mode 100644
index 0000000..ca3d95e
--- /dev/null
+++ b/exchanges/managers/okx_manager.py
@@ -0,0 +1,6 @@
+from exchanges.exchange_manager import ExchangeManager
+
+
+class OKXManager(ExchangeManager):
+ def __init__(self, pairs_to_load, market_types):
+ super().__init__("okx", pairs_to_load, market_types)
diff --git a/exchanges/processing.py b/exchanges/processing.py
new file mode 100644
index 0000000..7d92998
--- /dev/null
+++ b/exchanges/processing.py
@@ -0,0 +1,227 @@
+import json
+from datetime import datetime
+
+import numpy as np
+import pandas as pd
+from scipy.interpolate import interp1d
+
+from exchanges.constants.utils import SPREAD_MIN, SPREAD_MULTIPLIER, RANGE_MULT
+
+
+class Processing:
+ @staticmethod
+ def calculate_yield_curve(dataframe):
+ """
+ Calculates the average interest rate for each expiry date in a pandas DataFrame.
+
+ Parameters:
+ - dataframe: A pandas DataFrame containing at least two columns: 'expiry' and 'implied_interest_rate'.
+
+ Returns:
+ - A pandas DataFrame containing the average implied interest rate for each unique expiry date.
+ """
+ dataframe = dataframe.sort_values(by="expiry", ascending=False)
+
+        grouped = (
+            dataframe.groupby("expiry")[
+                ["implied_interest_rate", "days_to_expiry", "years_to_expiry"]
+            ]
+            .mean()
+            .reset_index()
+        )
+
+        return grouped[
+            ["expiry", "implied_interest_rate", "days_to_expiry", "years_to_expiry"]
+        ]
+
+ @staticmethod
+ def build_interest_rate_term_structure(df):
+ # Group by expiry date and calculate the average implied interest rate for each expiry
+ interest_rate_term_structure = df.groupby("expiry")["rimp"].mean().reset_index()
+
+ # Rename columns for clarity
+ interest_rate_term_structure.rename(
+ columns={"rimp": "average_implied_interest_rate"}, inplace=True
+ )
+
+ return interest_rate_term_structure
+
+ @staticmethod
+ def filter_near_next_term_options(df):
+ df["expiry"] = df["symbol"].apply(
+ lambda x: datetime.strptime(x.split("-")[1], "%y%m%d")
+ )
+ index_maturity_days = 30
+ today = datetime.now()
+ near_term_options = df[(df["expiry"] - today).dt.days <= index_maturity_days]
+ next_term_options = df[(df["expiry"] - today).dt.days > index_maturity_days]
+ return near_term_options, next_term_options
+
+ @staticmethod
+ def eliminate_invalid_quotes(df):
+ df = df[
+ (df["ask"] > df["bid"])
+ & (df["mark_price"] >= df["bid"])
+ & (df["mark_price"] <= df["ask"])
+ & (df["mark_price"] > 0)
+ ]
+ return df
+
+ @staticmethod
+ def process_quotes(df: pd.DataFrame) -> pd.DataFrame:
+ df = df.copy()
+ df["bid_spread"] = df["mark_price"] - df["bid"]
+ df["ask_spread"] = df["ask"] - df["mark_price"]
+
+ df["bid_spread"] = df["bid_spread"].apply(lambda x: x if x > 0 else 0)
+ df["ask_spread"] = df["ask_spread"].apply(lambda x: x if x > 0 else 0)
+
+ # Calculate total spread
+ df["spread"] = df["bid_spread"] + df["ask_spread"]
+
+ MAS = df[["bid_spread", "ask_spread"]].min(axis=1) * SPREAD_MULTIPLIER
+
+ # Calculate GMS
+ GMS = SPREAD_MIN * SPREAD_MULTIPLIER
+
+ df = df[(df["spread"] <= GMS) | (df["spread"] <= MAS)]
+
+ df["strike"] = df["symbol"].apply(lambda x: int(x.split("-")[2]))
+ df["option_type"] = df["symbol"].apply(lambda x: x[-1])
+
+ df["mid_price"] = (df["bid"] + df["ask"]) / 2
+ return df
+
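+    # Example of the spread filter with SPREAD_MIN = 0.0005 and
+    # SPREAD_MULTIPLIER = 10: GMS = 0.005, so a quote with bid 0.049,
+    # mark 0.050 and ask 0.052 has spread 0.003 and is kept.
+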
+ @staticmethod
+ def calculate_implied_forward_price(df):
+ calls = df[df["option_type"] == "C"]
+ puts = df[df["option_type"] == "P"]
+ combined = calls[["strike", "mid_price"]].merge(
+ puts[["strike", "mid_price"]], on="strike", suffixes=("_call", "_put")
+ )
+ combined["mid_price_diff"] = abs(
+ combined["mid_price_call"] - combined["mid_price_put"]
+ )
+ min_diff_strike = combined.loc[combined["mid_price_diff"].idxmin()]
+ forward_price = df.loc[
+ df["strike"] == min_diff_strike["strike"], "mark_price"
+ ].iloc[0]
+ Fimp = min_diff_strike["strike"] + forward_price * (
+ min_diff_strike["mid_price_call"] - min_diff_strike["mid_price_put"]
+ )
+ return Fimp
+
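+    # Put-call parity motivates Fimp = K* + D * (C - P) at the strike K* with
+    # the smallest |C - P|; here the multiplier D is taken from the mark price
+    # at that strike rather than an explicit discount factor.
+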
+ @staticmethod
+ def filter_and_sort_options(df, Fimp):
+ KATM = df[df["strike"] < Fimp]["strike"].max()
+ RANGE_MULT = 2.5
+ Kmin = Fimp / RANGE_MULT
+ Kmax = Fimp * RANGE_MULT
+ calls_otm = df[(df["strike"] > KATM) & (df["option_type"] == "C")]
+ puts_otm = df[(df["strike"] < KATM) & (df["option_type"] == "P")]
+ otm_combined = pd.concat([calls_otm, puts_otm])
+ otm_filtered = otm_combined[
+ (otm_combined["strike"] > Kmin) & (otm_combined["strike"] < Kmax)
+ ]
+ otm_sorted = otm_filtered.sort_values(by="strike")
+ tick_size = df[df["bid"] > 0]["bid"].min()
+ consecutive_threshold = 5
+ consecutive_count = 0
+ to_drop = []
+ for index, row in otm_sorted.iterrows():
+ if row["bid"] <= tick_size:
+ consecutive_count += 1
+ to_drop.append(index)
+ else:
+ consecutive_count = 0
+ if consecutive_count >= consecutive_threshold:
+ break
+ otm_final = otm_sorted.drop(to_drop)
+ otm_final["Fimp"] = Fimp
+ otm_final["KATM"] = KATM
+ # time to expiry in years
+ current_date = datetime.now()
+ otm_final["years_to_expiry"] = (
+ otm_final["expiry"] - current_date
+ ).dt.days / 365.25
+
+ return otm_final
+
+ @staticmethod
+ def calculate_wij(strike_prices_df, interest_rates_df):
+ interest_rates_df["expiry"] = pd.to_datetime(interest_rates_df["expiry"])
+ strike_prices_df["expiry"] = pd.to_datetime(strike_prices_df["expiry"])
+
+ strike_prices_df.sort_values(by=["expiry", "strike"], inplace=True)
+
+ merged_df = strike_prices_df.merge(
+ interest_rates_df, on="expiry", how="left", suffixes=("_x", "_y")
+ )
+
+ merged_df["K_prev"] = merged_df["strike"].shift(1)
+ merged_df["K_next"] = merged_df["strike"].shift(-1)
+
+ merged_df["Delta_K"] = (merged_df["K_next"] - merged_df["K_prev"]) / 2
+ merged_df["Delta_K"].fillna(method="bfill", inplace=True)
+ merged_df["Delta_K"].fillna(method="ffill", inplace=True)
+
+ merged_df["w_ij"] = (
+ np.exp(merged_df["implied_interest_rate"] * merged_df["years_to_expiry"])
+ * merged_df["Delta_K"]
+ ) / (merged_df["strike"] ** 2)
+ return merged_df
+
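+    # w_ij = exp(r_j * T_j) * Delta_K_ij / K_ij^2, the strike-space quadrature
+    # weight used when aggregating option mid prices into a variance.
+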
+    @staticmethod
+    def calculate_sigma_it_squared_for_all(w_ij_df):
+        T_i = w_ij_df["years_to_expiry"].mean()
+        F_i = w_ij_df["Fimp"].mean()
+        K_i_ATM = w_ij_df["KATM"].mean()
+
+        # sigma_i^2 = (1/T_i) * (2 * sum_j w_ij * Q_ij - (F_i / K_ATM - 1)^2),
+        # matching RawImpliedVariance.calculate_implied_variance.
+        sigma_squared = (1 / T_i) * (
+            2 * np.sum(w_ij_df["w_ij"] * w_ij_df["mid_price"])
+            - ((F_i / K_i_ATM) - 1) ** 2
+        )
+
+        return sigma_squared
+
+    @staticmethod
+ def find_missing_expiries(options_df, futures_df):
+ options_expiries = options_df["expiry"].unique()
+ futures_expiries = futures_df["expiry"].unique()
+ missing_expiries = sorted(list(set(options_expiries) - set(futures_expiries)))
+ return missing_expiries
+
+ @staticmethod
+ def interpolate_implied_interest_rates(futures_df, missing_expiries):
+ futures_df["expiry_ordinal"] = pd.to_datetime(futures_df["expiry"]).apply(
+ lambda x: x.toordinal()
+ )
+ missing_expiries_ordinal = [
+ pd.to_datetime(date).toordinal() for date in missing_expiries
+ ]
+
+ interp_func = interp1d(
+ futures_df["expiry_ordinal"],
+ futures_df["implied_interest_rate"],
+ kind="linear",
+ fill_value="extrapolate",
+ )
+
+ interpolated_rates = interp_func(missing_expiries_ordinal)
+
+ interpolated_rates_df = pd.DataFrame(
+ {"expiry": missing_expiries, "implied_interest_rate": interpolated_rates}
+ )
+
+ return interpolated_rates_df
+
+ @staticmethod
+ def calculate_delta_K(df):
+ df = df.sort_values(by="strike").reset_index(drop=True)
+ delta_K = pd.Series(dtype=float)
+ delta_K[0] = df.loc[1, "strike"] - df.loc[0, "strike"]
+ delta_K[df.index[-1]] = (
+ df.loc[df.index[-1], "strike"] - df.loc[df.index[-1] - 1, "strike"]
+ )
+
+ for i in range(1, len(df) - 1):
+ delta_K[i] = (df.loc[i + 1, "strike"] - df.loc[i - 1, "strike"]) / 2
+
+ return delta_K
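+
+
+# End-to-end sketch (assumes a quotes DataFrame with "symbol", "bid", "ask"
+# and "mark_price" columns, as produced by OptionFetcher):
+#
+#     p = Processing()
+#     quotes = p.eliminate_invalid_quotes(quotes)
+#     quotes = p.process_quotes(quotes)
+#     near, next_term = p.filter_near_next_term_options(quotes)
+#     Fimp = p.calculate_implied_forward_price(near)
+#     otm = p.filter_and_sort_options(near, Fimp)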
diff --git a/exchanges/raw_implied_variance.py b/exchanges/raw_implied_variance.py
new file mode 100644
index 0000000..0541292
--- /dev/null
+++ b/exchanges/raw_implied_variance.py
@@ -0,0 +1,177 @@
+import numpy as np
+
+
+class RawImpliedVariance:
+ def calculate_implied_variance(
+ self, F_i, K_i_ATM, strikes, option_prices, r_i, T_i, delta_K
+ ):
+ # Precompute constant
+ discount_factor = np.exp(r_i * T_i)
+
+ # Explicitly convert delta_K elements to float
+ delta_K = np.array(delta_K, dtype=float)
+
+ # Ensure that all arrays have the same length
+ min_length = min(len(strikes), len(option_prices), len(delta_K))
+ strikes = np.array(strikes[:min_length], dtype=float)
+ option_prices = np.array(option_prices[:min_length], dtype=float)
+
+ # Log-linear extrapolation
+ Kmin, Kmax = min(strikes), max(strikes)
+ extrapolated_strikes = np.logspace(np.log10(Kmin), np.log10(Kmax), num=1000)
+ extrapolated_option_prices = np.interp(
+ extrapolated_strikes, strikes, option_prices
+ )
+
+ # Log-linear piece-wise interpolation
+ interpolated_strikes = np.logspace(np.log10(Kmin), np.log10(Kmax), num=1000)
+ interpolated_option_prices = np.interp(
+ interpolated_strikes, strikes, option_prices
+ )
+
+ # Update strikes and option prices
+ strikes = np.concatenate([strikes, extrapolated_strikes, interpolated_strikes])
+ option_prices = np.concatenate(
+ [option_prices, extrapolated_option_prices, interpolated_option_prices]
+ )
+
+ # Reshape arrays to have the same shape for broadcasting
+ discount_factor = discount_factor.reshape((1,))
+ strikes = strikes.reshape((len(strikes), 1))
+ option_prices = option_prices.reshape((len(option_prices), 1))
+
+ # Vectorized operations with broadcasting
+ weights = discount_factor * (delta_K / strikes**2)
+ sum_term = np.sum(weights * option_prices)
+
+ # Calculate the implied variance using the formula
+ implied_variance = (1 / T_i) * (2 * sum_term - ((F_i / K_i_ATM) - 1) ** 2)
+
+ return implied_variance
+
+    def interpolate_variance(self, T_NEAR, T_NEXT, T_INDEX=30 / 365):
+        """
+        Calculate the weights for the near and next term variances based on the given times to maturity.
+
+        Parameters:
+        T_NEAR (list): List containing time to maturity for the near term.
+        T_NEXT (list): List containing time to maturity for the next term.
+        T_INDEX (list or float): Time to maturity for the index; a scalar is
+            broadcast to the length of T_NEAR.
+
+        Returns:
+        tuple: A tuple containing the weights for the near term (omega_NEAR) and the next term (omega_NEXT).
+        """
+        if not isinstance(T_INDEX, (list, tuple)):
+            T_INDEX = [T_INDEX] * len(T_NEAR)
+
+        if len(T_NEAR) != len(T_NEXT) or len(T_NEXT) != len(T_INDEX):
+            raise ValueError("Input lists must have the same length")
+
+ omega_NEAR_t = [
+ (T_NEXT[i] - T_INDEX[i])
+ / (T_NEXT[i] - T_NEAR[i] + 1e-9)
+ / (T_INDEX[i] + 1e-9)
+ for i in range(len(T_NEAR))
+ ]
+ omega_NEXT_t = [
+ (T_INDEX[i] - T_NEAR[i])
+ / (T_NEXT[i] - T_NEAR[i] + 1e-9)
+ / (T_NEXT[i] + 1e-9)
+ for i in range(len(T_NEAR))
+ ]
+
+ return omega_NEAR_t, omega_NEXT_t
+
+ def calculate_raw_implied_variance(
+ self, omega_NEAR_t, sigma2_NEAR_t, omega_NEXT_t, sigma2_NEXT_t
+ ):
+ """
+ Calculate the raw value of implied variance at the index maturity.
+
+ Parameters:
+ omega_NEAR_t (float): Weight for the near term variance.
+ sigma2_NEAR_t (float): Near term variance.
+ omega_NEXT_t (float): Weight for the next term variance.
+ sigma2_NEXT_t (float): Next term variance.
+
+ Returns:
+ float: The raw value of implied variance at the index maturity.
+ """
+ sigma2_RAW_t = omega_NEAR_t * sigma2_NEAR_t + omega_NEXT_t * sigma2_NEXT_t
+ return sigma2_RAW_t
+
+ def calculate_ewma(self, lambda_param, sigma2_SMOOTH_t_minus_1, sigma2_RAW_t):
+ """
+ Calculate the Exponentially-Weighted Moving Average (EWMA) of raw implied variance.
+
+ Parameters:
+ lambda_param (float): The smoothing parameter lambda.
+ sigma2_SMOOTH_t_minus_1 (float): The previous value of the smoothed implied variance.
+ sigma2_RAW_t (float): The raw implied variance at time t.
+
+ Returns:
+ float: The smoothed implied variance at time t.
+ """
+ sigma2_SMOOTH_t = (
+ lambda_param * sigma2_SMOOTH_t_minus_1 + (1 - lambda_param) * sigma2_RAW_t
+ )
+ return sigma2_SMOOTH_t
+
+ def calculate_ewma_recursive(
+ self, lambda_param, tau, sigma2_SMOOTH_previous, sigma2_RAW_history
+ ):
+ """
+ Calculate the Exponentially-Weighted Moving Average (EWMA) of raw implied variance recursively.
+
+ Parameters:
+ lambda_param (float): The smoothing parameter lambda.
+ tau (int): The number of periods over which the half-life is defined.
+ sigma2_SMOOTH_previous (float): The smoothed variance at time t-tau.
+ sigma2_RAW_history (list of float): The raw implied variances from time t-tau to t-1.
+
+ Returns:
+ float: The smoothed implied variance at time t.
+ """
+ ewma = lambda_param**tau * sigma2_SMOOTH_previous
+ for i in range(tau):
+ ewma += (1 - lambda_param) * (lambda_param**i) * sigma2_RAW_history[i]
+ return ewma
+
+ def calculate_lambda_with_half_life(self, tau):
+ """
+ Calculate the smoothing parameter lambda based on the specified half-life tau.
+
+ Parameters:
+ tau (float): The half-life of the exponentially-weighted moving average in seconds.
+
+ Returns:
+ float: The calculated smoothing parameter lambda.
+ """
+ lambda_param = np.exp(-np.log(2) / tau)
+ return lambda_param
+
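+    # Sanity check: lambda**tau = exp(-ln 2) = 0.5, i.e. the weight of an
+    # observation halves every tau periods.
+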
+    def calculate_xVIV(self, sigma2_smooth_t):
+        """
+        Calculate the xVIV value based on the given smoothed variance at time t.
+
+        Parameters:
+        sigma2_smooth_t (float): The smoothed variance at time t.
+
+        Returns:
+        float: The calculated xVIV value.
+        """
+        # The index is quoted as 100 times the square root of the smoothed variance.
+        return 100 * np.sqrt(sigma2_smooth_t)
+
+ def calculate_katm_strike(self, strikes, option_prices):
+ """
+ Calculate the ATM strike based on the given strikes and option prices.
+
+ Parameters:
+ strikes (list of float): The strikes of the options.
+ option_prices (list of float): The prices of the options.
+
+ Returns:
+ float: The calculated ATM strike.
+ """
+ # Find the index of the minimum value in the option prices
+ min_index = np.argmin(option_prices)
+
+ # Return the strike at the index
+ return strikes[min_index]
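+
+
+# Smoothing sketch (illustrative values; sigma2_raw comes from the variance
+# aggregation in exchanges/processing.py):
+#
+#     riv = RawImpliedVariance()
+#     lam = riv.calculate_lambda_with_half_life(tau=300)
+#     sigma2_smooth = riv.calculate_ewma(lam, sigma2_smooth_prev, sigma2_raw)
+#     index_level = riv.calculate_xVIV(sigma2_smooth)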
diff --git a/feeds/crypto_indices/mcap1000.py b/feeds/crypto_indices/mcap1000.py
index fa8a412..115dfa4 100644
--- a/feeds/crypto_indices/mcap1000.py
+++ b/feeds/crypto_indices/mcap1000.py
@@ -3,11 +3,12 @@
from apis.coinmarketcap import CoinMarketCapAPI as coinmarketcap
from apis.coingecko import CoinGeckoAPI as coingecko
+
# from apis.cryptocompare import CryptoCompareAPI as cryptocompare
class MCAP1000(DataFeed):
- NAME = 'mcap1000'
+ NAME = "mcap1000"
ID = 2
HEARTBEAT = 180
DATAPOINT_DEQUE = deque([], maxlen=100)
@@ -15,20 +16,20 @@ class MCAP1000(DataFeed):
@classmethod
def process_source_data_into_siwa_datapoint(cls):
- '''
- Process data from multiple sources
- '''
+ """
+ Process data from multiple sources
+ """
res = []
for source in [
# cryptocompare,
coinmarketcap,
- coingecko
+ coingecko,
]:
market_data = source().fetch_mcap_by_rank(cls.N)
if market_data is None:
continue
mcaps = sorted(list(market_data.keys()), reverse=True)
- res.append(sum(mcaps[:cls.N]))
+ res.append(sum(mcaps[: cls.N]))
if sum(res) == 0:
return cls.DATAPOINT_DEQUE[-1] # Should fail if DEQUE is empty
else:
diff --git a/feeds/data_feed.py b/feeds/data_feed.py
index c218a8d..49d6fa4 100644
--- a/feeds/data_feed.py
+++ b/feeds/data_feed.py
@@ -1,4 +1,4 @@
-#stdlib
+# stdlib
import os
import time
import logging
@@ -8,33 +8,33 @@
from datetime import datetime, timezone
from dataclasses import dataclass
-#third party
+# third party
import pandas as pd
-#our stuff
+# our stuff
import constants as c
#'%(asctime)s:%(thread)d - %(name)s - %(levelname)s - %(message)s')
-logger = logging.getLogger('SQLLogger')
+logger = logging.getLogger("SQLLogger")
logger.setLevel(logging.INFO)
-logger.propagate = False # TODO determine if undesirable
+logger.propagate = False # TODO determine if undesirable
+
@dataclass
class DataFeed:
- ''' The base-level implementation for all data feeds, which should inherit from DataFeed and implement the get_data_point method as required.
- '''
+ """The base-level implementation for all data feeds, which should inherit from DataFeed and implement the get_data_point method as required."""
- #NOTE: all child classes must define these class-level attributes
+ # NOTE: all child classes must define these class-level attributes
CHAIN: str
NAME: str
ID: int
- HEARTBEAT: int #in seconds
- START_TIME: float #unix timestamp
+ HEARTBEAT: int # in seconds
+ START_TIME: float # unix timestamp
DATAPOINT_DEQUE: deque
- #NOTE: the below are default attrs inherited by child classes
+ # NOTE: the below are default attrs inherited by child classes
ACTIVE: bool = False
- COUNT: int = 0 #number of data points served since starting
+ COUNT: int = 0 # number of data points served since starting
DATA_KEYS = (c.FEED_NAME, c.TIME_STAMP, c.DATA_POINT)
@classmethod
@@ -43,39 +43,39 @@ def get_data_dir(cls):
@classmethod
def start(cls):
- ''' flag feed as active so it can start receiving/processing data '''
+ """flag feed as active so it can start receiving/processing data"""
cls.START_TIME = time.time()
cls.ACTIVE = True
@classmethod
def stop(cls):
- ''' stop / pause feed from receiving/processing data
+ """stop / pause feed from receiving/processing data
for some feeds, this may involve some cleanup, disconnecting a stream etc.
- and would be handled in the overridden stop() method in that specific feed'''
+ and would be handled in the overridden stop() method in that specific feed"""
cls.ACTIVE = False
@classmethod
def run(cls):
- ''' run the data generating function(s)
+ """run the data generating function(s)
for some feeds this may be a loop,
in others it may be handled by a library e.g. tweepy (twitter) stream
- in that case there would be an overridden run() method in that feed'''
+ in that case there would be an overridden run() method in that feed"""
while cls.ACTIVE:
dp = cls.create_new_data_point()
- logger.info(f'\nNext data point for {cls.NAME}: {dp}\n')
+ logger.info(f"\nNext data point for {cls.NAME}: {dp}\n")
cls.DATAPOINT_DEQUE.append(dp)
cls.COUNT += 1
time.sleep(cls.HEARTBEAT)
@classmethod
def create_new_data_point(cls):
- ''' NOTE: this method must be implemented by the child class '''
+ """NOTE: this method must be implemented by the child class"""
raise NotImplementedError
@classmethod
def get_most_recently_stored_data_point(cls):
- ''' pass '''
+ """pass"""
data_point = cls.DATAPOINT_DEQUE[-1] if len(cls.DATAPOINT_DEQUE) else None
to_serve = (cls.NAME, time.time(), data_point)
return dict(zip(cls.DATA_KEYS, to_serve))
diff --git a/feeds/test_feed.py b/feeds/test_feed.py
index 7539ada..130fa03 100644
--- a/feeds/test_feed.py
+++ b/feeds/test_feed.py
@@ -1,12 +1,12 @@
from feeds.data_feed import DataFeed
-from collections import deque
+from collections import deque
from dataclasses import dataclass
import constants as c
from numpy import random
class Test(DataFeed):
- NAME = 'test'
+ NAME = "tests"
ID = 0
HEARTBEAT = 1
DATAPOINT_DEQUE = deque([], maxlen=100)
diff --git a/moving_average.py b/moving_average.py
index f0d32cf..2a9dd7c 100644
--- a/moving_average.py
+++ b/moving_average.py
@@ -17,11 +17,11 @@
y_values = []
for doc in documents:
- for detail in doc.get('detail', []): # Access the 'detail' field if it exists
- if detail['type'] == 'inscribe-transfer': # It can be filtered based on types
- blocktime = detail['blocktime'] # Get the blocktime
- satoshi = detail['satoshi'] # Get the satoshi value
-
+ for detail in doc.get("detail", []): # Access the 'detail' field if it exists
+ if detail["type"] == "inscribe-transfer": # It can be filtered based on types
+ blocktime = detail["blocktime"] # Get the blocktime
+ satoshi = detail["satoshi"] # Get the satoshi value
+
# Convert blocktime to a datetime object and append to X values
x_values.append(datetime.fromtimestamp(blocktime))
# Append satoshi to Y values
@@ -32,29 +32,29 @@
# Calculate moving average using numpy's convolve function
weights = np.ones(window_size) / window_size
-sma_values = np.convolve(y_values, weights, mode='valid')
+sma_values = np.convolve(y_values, weights, mode="valid")
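+# e.g. window_size = 5 gives weights [0.2, 0.2, 0.2, 0.2, 0.2], so each SMA
+# point is the mean of the five most recent satoshi values.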
# Trim x_values to match the length of sma_values (since the convolution reduces the length)
-sma_x_values = x_values[window_size - 1:]
+sma_x_values = x_values[window_size - 1 :]
# Create the plot
plt.figure(figsize=(16, 8))
# Plot original Satoshi values
-plt.plot_date(x_values, y_values, linestyle='solid', label='Original')
+plt.plot_date(x_values, y_values, linestyle="solid", label="Original")
# Plot SMA Satoshi values
-plt.plot_date(sma_x_values, sma_values, linestyle='solid', color='red', label='SMA')
+plt.plot_date(sma_x_values, sma_values, linestyle="solid", color="red", label="SMA")
# Format the plot
plt.gcf().autofmt_xdate() # Format the date on the x-axis
-date_format = mdates.DateFormatter('%Y-%m-%d %H:%M:%S')
+date_format = mdates.DateFormatter("%Y-%m-%d %H:%M:%S")
plt.gca().xaxis.set_major_formatter(date_format)
# Add titles, labels, and legend
-plt.title('Satoshi Value Over Time with SMA')
-plt.xlabel('DateTime')
-plt.ylabel('Satoshi')
+plt.title("Satoshi Value Over Time with SMA")
+plt.xlabel("DateTime")
+plt.ylabel("Satoshi")
plt.legend()
# Show the plot
diff --git a/requirements.txt b/requirements.txt
index c823176..49f8ee1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,8 +2,9 @@ cmd2==2.4.3
numpy==1.24.2
pandas==1.5.3
requests==2.28.1
+ccxt==4.2.6
python-dotenv
pymongo
matplotlib
-pandas
-numpy
\ No newline at end of file
diff --git a/research/taking_residual.py b/research/taking_residual.py
index 867eb3b..08fb702 100644
--- a/research/taking_residual.py
+++ b/research/taking_residual.py
@@ -13,30 +13,32 @@
# If you received raw dictionary-like objects and need to normalize datetime field:
for doc in documents:
- if isinstance(doc['timestamp'], (int, float)): # Assuming UNIX timestamp in seconds
- doc['timestamp'] = datetime.fromtimestamp(doc['timestamp'])
+ if isinstance(doc["timestamp"], (int, float)): # Assuming UNIX timestamp in seconds
+ doc["timestamp"] = datetime.fromtimestamp(doc["timestamp"])
# Convert documents to DataFrame
df = pd.DataFrame(documents)
# Convert 'timestamp' to datetime if necessary and set it as index
-df['timestamp'] = pd.to_datetime(df['timestamp'], unit='s') # Adjust 'unit' as per requirement
-df = df.set_index('timestamp')
+df["timestamp"] = pd.to_datetime(
+ df["timestamp"], unit="s"
+) # Adjust 'unit' as per requirement
+df = df.set_index("timestamp")
df.sort_index(inplace=True)
# Calculate simple moving averages (SMA) to represent `twap_60min` and `twap_10min`
-window_sizes = {'twap_60min': 60, 'twap_10min': 10}
+window_sizes = {"twap_60min": 60, "twap_10min": 10}
for name, window in window_sizes.items():
- df[name] = df['value'].rolling(window, min_periods=1).mean()
+ df[name] = df["value"].rolling(window, min_periods=1).mean()
# Calculate residuals by subtracting the TWAPs from the original values
for col in window_sizes.keys():
- df[f'{col}_residual'] = df['value'] - df[col]
+ df[f"{col}_residual"] = df["value"] - df[col]
# Plot the residuals
plt.figure(figsize=(12, 6))
-plt.plot(df.index, df['twap_60min_residual'], label='60-min TWAP Residuals')
-plt.plot(df.index, df['twap_10min_residual'], label='10-min TWAP Residuals')
+plt.plot(df.index, df["twap_60min_residual"], label="60-min TWAP Residuals")
+plt.plot(df.index, df["twap_10min_residual"], label="10-min TWAP Residuals")
plt.title("Residuals of BRC20 Time Series Data")
plt.xlabel("Timestamp")
plt.ylabel("Residual value")
@@ -47,14 +49,18 @@
# Now, the df contains original data, the TWAP values, and the residuals.
# You can save the DataFrame back into MongoDB, in a new collection for example:
-new_collection = db['brc20_residuals']
+new_collection = db["brc20_residuals"]
# Convert the DataFrame to dict and store it
-residuals_dict = df[['twap_60min_residual', 'twap_10min_residual']].to_dict("records")
+residuals_dict = df[["twap_60min_residual", "twap_10min_residual"]].to_dict("records")
# If you wish to store them in the database, you can use insert or update
# Depending on your requirement this could be:
# new_collection.insert_many(residuals_dict)
# or to update existing documents with new fields:
for index, row in df.iterrows():
- update = {"$set": {"twap_60min_residual": row['twap_60min_residual'],
- "twap_10min_residual": row['twap_10min_residual']}}
- collection.update_one({"_id": row['_id']}, update)
+ update = {
+ "$set": {
+ "twap_60min_residual": row["twap_60min_residual"],
+ "twap_10min_residual": row["twap_10min_residual"],
+ }
+ }
+ collection.update_one({"_id": row["_id"]}, update)
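+# NOTE: one update_one per row means one round trip each; pymongo's bulk API
+# batches them (a sketch, same fields as above):
+# from pymongo import UpdateOne
+# ops = [
+#     UpdateOne(
+#         {"_id": row["_id"]},
+#         {"$set": {
+#             "twap_60min_residual": row["twap_60min_residual"],
+#             "twap_10min_residual": row["twap_10min_residual"],
+#         }},
+#     )
+#     for _, row in df.iterrows()
+# ]
+# if ops:
+#     collection.bulk_write(ops)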
diff --git a/series_fetching.py b/series_fetching.py
index 5d296e8..e1b204d 100644
--- a/series_fetching.py
+++ b/series_fetching.py
@@ -31,11 +31,12 @@ def connection_db(ticker):
collection = db[ticker] # Select the collection based on the ticker name.
return collection
+
# Connect to the database for the first BRC20 token in the list.
collection = connection_db(brc20_list[0])
# Retrieve the last document from the collection, sorted by the _id field (descending).
-last_document = collection.find_one({}, sort=[('_id', pymongo.DESCENDING)])
+last_document = collection.find_one({}, sort=[("_id", pymongo.DESCENDING)])
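+# NOTE: sorting by _id descending returns the most recently *inserted*
+# document (ObjectIds embed an insertion timestamp), which is not necessarily
+# the highest block height if backfilling ever ran out of order.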
# Determine the block height from which to start processing records.
start_block_height = brc20_ticker_info.json()["data"]["deployHeight"]
@@ -45,18 +46,24 @@ def connection_db(ticker):
# Remove the last document which is potentially partial or incomplete.
collection.delete_one({"_id": last_document["_id"]})
+
def store_db():
global collection # Declare the global collection variable to be used within this function.
- # Loop through block heights starting from the determined start block height up to the best block height.
+ # Loop through block heights from the determined start block height up to
+ # best_block_height - 1 (exclusive, since range() stops before its end).
for height in range(start_block_height, best_block_height - 1):
- print("Block height: ", height) # Output the current block height being processed.
+ print(
+ "Block height: ", height
+ ) # Output the current block height being processed.
# Process each event type.
for event_type in event_types:
# Query the BRC20 ticker history for the current block height and event type.
- respond = unisat_api.get_brc20_ticker_history(brc20_list[0], height, event_type, 0, 100).json()["data"]["detail"]
+ respond = unisat_api.get_brc20_ticker_history(
+ brc20_list[0], height, event_type, 0, 100
+ ).json()["data"]["detail"]
- # If the response is not empty, insert the data into the MongoDB collection.
- if respond is not []:
- collection.insert_one(respond)
+ # If the response is non-empty, insert the events into the MongoDB
+ # collection. A truthiness check is used because `respond is not []`
+ # compares identity and is always True; `detail` is a list of event
+ # documents, so insert_many is the appropriate call.
+ if respond:
+ collection.insert_many(respond)
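+ # NOTE: only the first page (start=0, limit=100) is fetched per block and
+ # event type; a block with more than 100 events would need paging over
+ # `start` until "detail" comes back empty.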
+
# Call the store_db function to start storing records into the database.
store_db()
diff --git a/siwa.py b/siwa.py
index 1ac6935..6003a0d 100644
--- a/siwa.py
+++ b/siwa.py
@@ -1,4 +1,4 @@
-#stdlib
+# stdlib
import os
import sys
import logging
@@ -7,10 +7,10 @@
import time
from datetime import datetime, timezone
-#third party
+# third party
import cmd2
-#our stuff
+# our stuff
from all_feeds import all_feeds
import constants as c
@@ -24,41 +24,45 @@ def get_params():
parser = argparse.ArgumentParser()
parser.add_argument(
- '--datafeeds',
- nargs='+',
+ "--datafeeds",
+ nargs="+",
default=[],
- help='List of datafeeds to start, separated by commas. Call like this: python siwa.py --datafeeds feed1 feed2 feed3'
+ help="List of datafeeds to start, separated by commas. Call like this: python siwa.py --datafeeds feed1 feed2 feed3",
)
args = parser.parse_args()
datafeeds = [all_feeds[f] for f in args.datafeeds]
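+ # NOTE: an unknown feed name raises a bare KeyError on the line above;
+ # passing choices=list(all_feeds) to add_argument would turn that into
+ # an argparse usage error instead.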
return datafeeds
+
def start_feeds(feeds):
- ''' start all feeds in feeds list '''
+ """start all feeds in feeds list"""
for feed in feeds:
- #(re)activate feed / allow it to start or resume processing
+ # (re)activate feed / allow it to start or resume processing
feed.start()
- #print datafeed startup message to CLI
+ # print datafeed startup message to CLI
print(c.start_message(feed))
- #create new thread *only if* one doesn't already exist
+ # create new thread *only if* one doesn't already exist
- if not feed.NAME in datafeed_threads:
+ if feed.NAME not in datafeed_threads:
thread = threading.Thread(target=feed.run)
thread.start()
datafeed_threads[feed.NAME] = thread
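+ # NOTE: these threads are non-daemon, so a clean exit depends on each
+ # feed's run() returning after feed.stop(); daemon=True on the Thread
+ # would be the alternative if abrupt shutdown were acceptable.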
+
def stop_feeds(feeds):
- ''' stop *and kill thread for* all feeds in a list '''
+ """stop *and kill thread for* all feeds in a list"""
for feed in feeds:
feed.stop()
datafeed_threads[feed.NAME].join()
- del(datafeed_threads[feed.NAME])
+ del datafeed_threads[feed.NAME]
+
class Siwa(cmd2.Cmd):
- ''' siwa CLI: allows user to start/stop datafeeds, list feed statuses '''
- prompt = '\nSIWA> '
+ """siwa CLI: allows user to start/stop datafeeds, list feed statuses"""
+
+ prompt = "\nSIWA> "
def __init__(self):
super().__init__()
@@ -67,63 +71,68 @@ def __init__(self):
self.init_time = time.time()
self.debug = c.DEBUG
if self.debug:
- self.poutput(':::DEBUG MODE ENABLED:::')
+ self.poutput(":::DEBUG MODE ENABLED:::")
def do_status(self, args: cmd2.Statement):
- '''show status (active, inactive) for all datafeeds,
+ """show status (active, inactive) for all datafeeds,
if debug enabled, also show status of threads;
- inactive datafeeds merely sleep, they do not close their threads'''
- #if -v then shows params too
+ inactive datafeeds merely sleep, they do not close their threads"""
+ # if -v then shows params too
self.poutput(c.init_time_message(self))
for feed in all_feeds.values():
self.poutput(c.status_message(feed))
- self.poutput(f'{feed.NAME} deque len: {len(feed.DATAPOINT_DEQUE)}')
+ self.poutput(f"{feed.NAME} deque len: {len(feed.DATAPOINT_DEQUE)}")
if c.DEBUG:
threadcount = threading.active_count()
- datafeed_threadcount = threading.active_count() - 1 - 1 - c.WEBSERVER_THREADS
+ datafeed_threadcount = (
+ threading.active_count() - 1 - 1 - c.WEBSERVER_THREADS
+ )
endpoint_threadcount = 1 + c.WEBSERVER_THREADS
- self.poutput(f'''
+ self.poutput(
+ f"""
--- THREAD DEBUG INFO ---
datafeed threads running: {datafeed_threadcount}
total threads: {threadcount} (1 main, {endpoint_threadcount} endpoint, and {datafeed_threadcount} feeds)
- feeds threads running: {list(datafeed_threads.keys()) or '[none]'}''')
+ feeds threads running: {list(datafeed_threads.keys()) or '[none]'}"""
+ )
def do_start(self, args: cmd2.Statement):
- '''start specified feed, if none specified start all;
- create new thread for feed if none extant'''
+ """start specified feed, if none specified start all;
+ create new thread for feed if none extant"""
if args:
- #start specific feed, if given
+ # start specific feed, if given
feeds = [all_feeds[f] for f in args.arg_list]
else:
- #else start all feeds
+ # else start all feeds
feeds = all_feeds.values()
start_feeds(feeds)
def do_stop(self, args: cmd2.Statement):
- '''stop datafeed processing
- (thread remains running in case we want to re-activate)'''
+ """stop datafeed processing
+ (thread remains running in case we want to re-activate)"""
if args:
- #stop specific feed, if given
+ # stop specific feed, if given
feeds = [all_feeds[f] for f in args.arg_list]
else:
- #else stop all active feeds
+ # else stop all active feeds
feeds = [f for f in all_feeds.values() if f.ACTIVE]
for feed in feeds:
self.poutput(c.stop_message(feed))
stop_feeds([feed])
- def do_quit(self,args: cmd2.Statement):
+ def do_quit(self, args: cmd2.Statement):
"""Exit the application"""
- self.poutput('quitting; waiting for heartbeat timeout')
+ self.poutput("quitting; waiting for heartbeat timeout")
for feed in all_feeds.values():
feed.stop()
return True
-if __name__ == '__main__':
+
+if __name__ == "__main__":
args = get_params()
if args:
start_feeds(args)
diff --git a/test/test_unisat.py b/test/test_unisat.py
index 59d0e24..98d7ed8 100644
--- a/test/test_unisat.py
+++ b/test/test_unisat.py
@@ -1,3 +1,27 @@
+# from apis import unisat
+# import unittest
+#
+#
+# class TestUnisat(unittest.TestCase):
+# # @classmethod
+# # def setUpClass(cls):
+# # # if os.path.exists(Test.get_data_dir()):
+# # ...
+# # # os.remove(Test.get_data_dir())
+# #
+# # @classmethod
+# # def tearDownClass(cls):
+# # ...
+# #
+# # def test_get_blockchain_info(self):
+# # self.assertListEqual(
+# # list(unisat.get_blockchain_info().json().keys()), ["code", "msg", "data"]
+# # )
+#
+#
+# if __name__ == "__main__":
+# unittest.main()
+
import sys
import os
@@ -23,35 +47,107 @@ def tearDown(self):
def test_get_blockchain_info(self):
self.assertListEqual(
list(self.UnisatAPI.get_blockchain_info().json()["data"].keys()),
- ['chain', 'blocks', 'headers', 'bestBlockHash', 'prevBlockHash', 'difficulty', 'medianTime', 'chainwork'],
+ [
+ "chain",
+ "blocks",
+ "headers",
+ "bestBlockHash",
+ "prevBlockHash",
+ "difficulty",
+ "medianTime",
+ "chainwork",
+ ],
)
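+ # NOTE: assertListEqual on .keys() is order- and completeness-sensitive;
+ # assertCountEqual (or a subset check) would keep these tests passing if
+ # the API ever adds fields, at the cost of looser coverage.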
def test_get_block_txs(self):
height = 824631
self.assertListEqual(
list(self.UnisatAPI.get_block_txs(height).json()["data"][0].keys()),
- ['txid', 'nIn', 'nOut', 'size', 'witOffset', 'locktime', 'inSatoshi', 'outSatoshi', 'nNewInscription', 'nInInscription', 'nOutInscription', 'nLostInscription', 'timestamp', 'height', 'blkid', 'idx', 'confirmations'],
+ [
+ "txid",
+ "nIn",
+ "nOut",
+ "size",
+ "witOffset",
+ "locktime",
+ "inSatoshi",
+ "outSatoshi",
+ "nNewInscription",
+ "nInInscription",
+ "nOutInscription",
+ "nLostInscription",
+ "timestamp",
+ "height",
+ "blkid",
+ "idx",
+ "confirmations",
+ ],
)
- def test_get_block_txs(self):
+ def test_get_tx_info(self):
- txid = '45a76470f80982d769b1974181cd4f7261084ac8db3dcb1cd4547f9fe91590cf'
+ txid = "45a76470f80982d769b1974181cd4f7261084ac8db3dcb1cd4547f9fe91590cf"
self.assertListEqual(
list(self.UnisatAPI.get_tx_info(txid).json()["data"].keys()),
- ['txid', 'nIn', 'nOut', 'size', 'witOffset', 'locktime', 'inSatoshi', 'outSatoshi', 'nNewInscription', 'nInInscription', 'nOutInscription', 'nLostInscription', 'timestamp', 'height', 'blkid', 'idx', 'confirmations'],
+ [
+ "txid",
+ "nIn",
+ "nOut",
+ "size",
+ "witOffset",
+ "locktime",
+ "inSatoshi",
+ "outSatoshi",
+ "nNewInscription",
+ "nInInscription",
+ "nOutInscription",
+ "nLostInscription",
+ "timestamp",
+ "height",
+ "blkid",
+ "idx",
+ "confirmations",
+ ],
)
def test_get_inscription_utxo(self):
- address = '1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6'
+ address = "1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6"
self.assertListEqual(
list(self.UnisatAPI.get_inscription_utxo(address).json()["data"].keys()),
- ['cursor', 'total', 'totalConfirmed', 'totalUnconfirmed', 'totalUnconfirmedSpend', 'utxo'],
+ [
+ "cursor",
+ "total",
+ "totalConfirmed",
+ "totalUnconfirmed",
+ "totalUnconfirmedSpend",
+ "utxo",
+ ],
)
def test_get_inscription_info(self):
- inscriptionid = '75017937ad1de1f50709910aa5889be9c7d8f019a1c02922d291f9bfa9a8b0fei0'
+ inscriptionid = (
+ "75017937ad1de1f50709910aa5889be9c7d8f019a1c02922d291f9bfa9a8b0fei0"
+ )
self.assertListEqual(
- list(self.UnisatAPI.get_inscription_utxo(inscriptionid).json()["data"].keys()),
- ['utxo', 'address', 'offset', 'inscriptionIndex', 'inscriptionNumber', 'inscriptionId', 'contentType', 'contentLength', 'contentBody', 'height', 'timestamp', 'inSatoshi', 'outSatoshi', 'brc20', 'detail'],
+ list(
+ self.UnisatAPI.get_inscription_info(inscriptionid).json()["data"].keys()
+ ),
+ [
+ "utxo",
+ "address",
+ "offset",
+ "inscriptionIndex",
+ "inscriptionNumber",
+ "inscriptionId",
+ "contentType",
+ "contentLength",
+ "contentBody",
+ "height",
+ "timestamp",
+ "inSatoshi",
+ "outSatoshi",
+ "brc20",
+ "detail",
+ ],
)
def test_get_brc20_list(self):
@@ -59,7 +155,7 @@ def test_get_brc20_list(self):
limit = 100
self.assertListEqual(
list(self.UnisatAPI.get_brc20_list(start, limit).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ ["height", "total", "start", "detail"],
)
def test_get_brc20_status(self):
@@ -68,22 +164,49 @@ def test_get_brc20_status(self):
sort = "holders"
complete = "yes"
self.assertListEqual(
- list(self.UnisatAPI.get_brc20_status(start, limit, sort, complete).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ list(
+ self.UnisatAPI.get_brc20_status(start, limit, sort, complete)
+ .json()["data"]
+ .keys()
+ ),
+ ["height", "total", "start", "detail"],
)
def test_get_brc20_ticker_info(self):
ticker = "EFIL"
self.assertListEqual(
list(self.UnisatAPI.get_brc20_ticker_info(ticker).json()["data"].keys()),
- ['ticker', 'holdersCount', 'historyCount', 'inscriptionNumber', 'inscriptionId', 'max', 'limit', 'minted', 'totalMinted', 'confirmedMinted', 'confirmedMinted1h', 'confirmedMinted24h', 'mintTimes', 'decimal', 'creator', 'txid', 'deployHeight', 'deployBlocktime', 'completeHeight', 'completeBlocktime', 'inscriptionNumberStart', 'inscriptionNumberEnd'],
+ [
+ "ticker",
+ "holdersCount",
+ "historyCount",
+ "inscriptionNumber",
+ "inscriptionId",
+ "max",
+ "limit",
+ "minted",
+ "totalMinted",
+ "confirmedMinted",
+ "confirmedMinted1h",
+ "confirmedMinted24h",
+ "mintTimes",
+ "decimal",
+ "creator",
+ "txid",
+ "deployHeight",
+ "deployBlocktime",
+ "completeHeight",
+ "completeBlocktime",
+ "inscriptionNumberStart",
+ "inscriptionNumberEnd",
+ ],
)
def test_get_brc20_holders(self):
ticker = "EFIL"
self.assertListEqual(
list(self.UnisatAPI.get_brc20_holders(ticker).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ ["height", "total", "start", "detail"],
)
# Need to try with correct params
@@ -103,76 +226,122 @@ def test_get_history_by_height(self):
start = 0
limit = 100
self.assertListEqual(
- list(self.UnisatAPI.get_history_by_height(height, start, limit).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ list(
+ self.UnisatAPI.get_history_by_height(height, start, limit)
+ .json()["data"]
+ .keys()
+ ),
+ ["height", "total", "start", "detail"],
)
def test_get_brc20_tx_history(self):
ticker = "EFIL"
- txid = '45a76470f80982d769b1974181cd4f7261084ac8db3dcb1cd4547f9fe91590cf'
+ txid = "45a76470f80982d769b1974181cd4f7261084ac8db3dcb1cd4547f9fe91590cf"
start = 0
limit = 100
self.assertListEqual(
- list(self.UnisatAPI.get_brc20_tx_history(ticker, txid, start, limit).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ list(
+ self.UnisatAPI.get_brc20_tx_history(ticker, txid, start, limit)
+ .json()["data"]
+ .keys()
+ ),
+ ["height", "total", "start", "detail"],
)
def test_get_address_brc20_summary(self):
- address = '1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6'
+ address = "1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6"
start = 0
limit = 100
self.assertListEqual(
- list(self.UnisatAPI.get_address_brc20_summary(address, start, limit).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ list(
+ self.UnisatAPI.get_address_brc20_summary(address, start, limit)
+ .json()["data"]
+ .keys()
+ ),
+ ["height", "total", "start", "detail"],
)
def test_get_address_brc20_summary_by_height(self):
- address = '1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6'
+ address = "1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6"
height = 824631
start = 0
limit = 100
self.assertListEqual(
- list(self.UnisatAPI.get_address_brc20_summary_by_height(address, height, start, limit).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ list(
+ self.UnisatAPI.get_address_brc20_summary_by_height(
+ address, height, start, limit
+ )
+ .json()["data"]
+ .keys()
+ ),
+ ["height", "total", "start", "detail"],
)
def test_get_address_brc20_ticker_info(self):
- address = '1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6'
+ address = "1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6"
ticker = "EFIL"
self.assertListEqual(
- list(self.UnisatAPI.get_address_brc20_ticker_info(address, ticker).json()["data"].keys()),
- ['ticker', 'overallBalance', 'transferableBalance', 'availableBalance', 'availableBalanceSafe', 'availableBalanceUnSafe', 'transferableCount', 'transferableInscriptions', 'historyCount', 'historyInscriptions'],
+ list(
+ self.UnisatAPI.get_address_brc20_ticker_info(address, ticker)
+ .json()["data"]
+ .keys()
+ ),
+ [
+ "ticker",
+ "overallBalance",
+ "transferableBalance",
+ "availableBalance",
+ "availableBalanceSafe",
+ "availableBalanceUnSafe",
+ "transferableCount",
+ "transferableInscriptions",
+ "historyCount",
+ "historyInscriptions",
+ ],
)
def test_get_address_brc20_history(self):
- address = '1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6'
+ address = "1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6"
start = 0
limit = 100
self.assertListEqual(
- list(self.UnisatAPI.get_address_brc20_history(address, start, limit).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ list(
+ self.UnisatAPI.get_address_brc20_history(address, start, limit)
+ .json()["data"]
+ .keys()
+ ),
+ ["height", "total", "start", "detail"],
)
def test_get_address_brc20_history_by_ticker(self):
- address = '1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6'
+ address = "1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6"
ticker = "EFIL"
type = "inscribe-deploy"
start = 0
limit = 100
self.assertListEqual(
- list(self.UnisatAPI.get_address_brc20_history_by_ticker(address, ticker, type, start, limit).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ list(
+ self.UnisatAPI.get_address_brc20_history_by_ticker(
+ address, ticker, event_type, start, limit
+ )
+ .json()["data"]
+ .keys()
+ ),
+ ["height", "total", "start", "detail"],
)
-
+
def test_get_transferable_inscriptions(self):
- address = '1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6'
+ address = "1K6KoYC69NnafWJ7YgtrpwJxBLiijWqwa6"
ticker = "EFIL"
self.assertListEqual(
- list(self.UnisatAPI.get_transferable_inscriptions(address, ticker).json()["data"].keys()),
- ['height', 'total', 'start', 'detail'],
+ list(
+ self.UnisatAPI.get_transferable_inscriptions(address, ticker)
+ .json()["data"]
+ .keys()
+ ),
+ ["height", "total", "start", "detail"],
)
-
if __name__ == "__main__":
unittest.main()