From dbe5ff22b896228c1dabe51fbf4e3286d7c9b81b Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 28 Jul 2022 16:02:51 +0100 Subject: [PATCH 01/57] Ran prettier --- .../lnurlpayout/templates/lnurlpayout/_api_docs.html | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/lnbits/extensions/lnurlpayout/templates/lnurlpayout/_api_docs.html b/lnbits/extensions/lnurlpayout/templates/lnurlpayout/_api_docs.html index 4f921bb57..afe24c423 100644 --- a/lnbits/extensions/lnurlpayout/templates/lnurlpayout/_api_docs.html +++ b/lnbits/extensions/lnurlpayout/templates/lnurlpayout/_api_docs.html @@ -4,12 +4,7 @@ label="API info" :content-inset-level="0.5" > - + @@ -38,7 +33,6 @@ expand-separator label="Create a lnurlpayout" > - Date: Fri, 30 Sep 2022 09:17:20 +0100 Subject: [PATCH 02/57] Disable debug --- lnbits/extensions/gerty/models.py | 1 + lnbits/extensions/gerty/views_api.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/models.py b/lnbits/extensions/gerty/models.py index fc7a33774..5cbb08f44 100644 --- a/lnbits/extensions/gerty/models.py +++ b/lnbits/extensions/gerty/models.py @@ -9,6 +9,7 @@ class Gerty(BaseModel): name: str wallet: str refresh_time: int = Query(None) + debug_enabled: int = Query(None) lnbits_wallets: str = Query(None) # Wallets to keep an eye on, {"wallet-id": "wallet-read-key, etc"} mempool_endpoint: str = Query(None) # Mempool endpoint to use exchange: str = Query(None) # BTC <-> Fiat exchange rate to pull ie "USD", in 0.0001 and sats diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index b5852ee6e..da903f686 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -140,7 +140,7 @@ async def api_gerty_json( "refreshTime": gerty.refresh_time, "requestTimestamp": round(time.time()), "nextScreenNumber": next_screen_number, - "showTextBoundRect": True, + "showTextBoundRect": False, "name": gerty.name }, "screen": 
{ From d2b4d6c837440990ed395512a73734a131802692 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Fri, 30 Sep 2022 09:30:08 +0100 Subject: [PATCH 03/57] bug squash --- lnbits/extensions/gerty/models.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lnbits/extensions/gerty/models.py b/lnbits/extensions/gerty/models.py index 5cbb08f44..fc7a33774 100644 --- a/lnbits/extensions/gerty/models.py +++ b/lnbits/extensions/gerty/models.py @@ -9,7 +9,6 @@ class Gerty(BaseModel): name: str wallet: str refresh_time: int = Query(None) - debug_enabled: int = Query(None) lnbits_wallets: str = Query(None) # Wallets to keep an eye on, {"wallet-id": "wallet-read-key, etc"} mempool_endpoint: str = Query(None) # Mempool endpoint to use exchange: str = Query(None) # BTC <-> Fiat exchange rate to pull ie "USD", in 0.0001 and sats From 454ae1bf95689cdd3cd8b306b53648bd07b4afbe Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Fri, 30 Sep 2022 09:37:28 +0100 Subject: [PATCH 04/57] Split text for gerty --- lnbits/extensions/gerty/views_api.py | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index da903f686..1f4cce6e8 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -1,6 +1,7 @@ import math from http import HTTPStatus import json +import textwrap import httpx import random import os @@ -267,8 +268,30 @@ async def get_exchange_rate(gerty): # A helper function get a nicely formated dict for the text def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int = None): + # Get line size by font size + line_width = 60 + if font_size <= 12: + line_width = 80 + elif font_size <= 15: + line_width = 60 + elif font_size <= 20: + line_width = 40 + elif font_size <= 40: + line_width = 30 + else: + line_width = 20 + + # wrap the text + wrapper = textwrap.TextWrapper(width=line_width) + word_list = wrapper.wrap(text=text) 
+ + multilineText = '\n'.join(word_list) + + # logger.debug('multilineText') + # logger.debug(multilineText) + text = { - "value": text, + "value": multilineText, "size": font_size } if x_pos is None and y_pos is None: From a1fbd1056157542c81e4f5982f5a047b7b4377a8 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Fri, 30 Sep 2022 09:48:04 +0100 Subject: [PATCH 05/57] Drop satoshi quote font size --- lnbits/extensions/gerty/views_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 1f4cce6e8..199304846 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -236,9 +236,9 @@ async def get_satoshi_quotes(): quote = await api_gerty_satoshi() if quote: if quote['text']: - text.append(get_text_item_dict(quote['text'], 15)) + text.append(get_text_item_dict(quote['text'], 12)) if quote['date']: - text.append(get_text_item_dict(quote['date'], 15)) + text.append(get_text_item_dict("Satoshi Nakamoto - {0}".format(quote['date']), 15)) return text From ebc0e60cc9a0b691ff499e5e25ab6c09fd066a75 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sat, 1 Oct 2022 16:21:46 +0100 Subject: [PATCH 06/57] Added Wuille facts and tweaked line lengths --- lnbits/extensions/gerty/static/pieter_wuille.json | 14 ++++++++++++-- lnbits/extensions/gerty/views_api.py | 4 ++-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/lnbits/extensions/gerty/static/pieter_wuille.json b/lnbits/extensions/gerty/static/pieter_wuille.json index 986150ed2..9dec9f679 100644 --- a/lnbits/extensions/gerty/static/pieter_wuille.json +++ b/lnbits/extensions/gerty/static/pieter_wuille.json @@ -7,8 +7,18 @@ "Dan Bernstein only trusts one source of random numbers: Pieter Wuille.", "Putting Pieter Wuille in the title of an r/Bitcoin submission gets more upvotes than the same post from Pieter Wuille himself.", "Pieter Wuille won the underhanded crypto contest but his 
entry was so underhanded nobody even knows he entered.", - "Greg Maxwell is a bot created by Pieter Wuille to argue on reddit while he can get code done.", + "Greg Maxwell is a bot created by Pieter Wuille to argue on reddit so he can get code done.", "Pieter Wuille doesn't need the public key to calculate the corresponding private key.", - "When the Wikipedia servers corrupted all data including backups, Pieter Wuille had to stay awake all night to retype it." + "When the Wikipedia servers corrupted all data including backups, Pieter Wuille had to stay awake all night to retype it.", + "It is a Bitcoin consensus rule that when Pieter's hard drive is full no more blocks can be added.", + "When they go out, Pieter Wuille pays for his parents.", + "Pieter Wuille replaced the existing monetary system by writing a few thousand lines of code.", + "Putting Pieter Wuille in the title of an r/Bitcoin submission gets more upvotes than the same post from Pieter Wuille himself.", + "Only Pieter Wuille can name things harder to pronounce than Pieter Wuille.", + "Pieter Wuille doesn't write code, he wills it into existence.", + "If every copy of the blockchain were deleted Pieter Wuille would recreate it from memory.", + "If all else fails, bitcoin should be restarted by syncing the code and the blockchain directly from Wuille's mind.", + "Pieter Wuille codes // Enlightened Zen master floats // Haikus trickle down.", + "Pieter Wuille once wrote a constant time generator for generating constant time cryptographic code." 
] } \ No newline at end of file diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 199304846..952142dbe 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -271,9 +271,9 @@ def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int # Get line size by font size line_width = 60 if font_size <= 12: - line_width = 80 + line_width = 75 elif font_size <= 15: - line_width = 60 + line_width = 58 elif font_size <= 20: line_width = 40 elif font_size <= 40: From 168941a0ad315e79763155a4a449bf9632002fa9 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sat, 1 Oct 2022 16:25:46 +0100 Subject: [PATCH 07/57] Removed really long satoshi quotes --- lnbits/extensions/gerty/static/satoshi.json | 839 ------------- .../extensions/gerty/static/satoshi_long.json | 1099 +++++++++++++++++ 2 files changed, 1099 insertions(+), 839 deletions(-) create mode 100644 lnbits/extensions/gerty/static/satoshi_long.json diff --git a/lnbits/extensions/gerty/static/satoshi.json b/lnbits/extensions/gerty/static/satoshi.json index 1cff822aa..b28697370 100644 --- a/lnbits/extensions/gerty/static/satoshi.json +++ b/lnbits/extensions/gerty/static/satoshi.json @@ -6,76 +6,6 @@ "post_id": "542", "date": "December 11, 2010" }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "The project needs to grow gradually so the software can be strengthened along the way. I make this appeal to WikiLeaks not to try to use Bitcoin. Bitcoin is a small beta community in its infancy.", - "post_id": "523", - "date": "December 5, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "I'm happy if someone with artistic skill wants to contribute alternatives. The icon/logo was meant to be good as an icon at the 16x16 and 20x20 pixel sizes. I think it's the best program icon, but there's room for improvement at larger sizes for a graphic for use on websites. 
It'll be a lot simpler if authors could make their graphics public domain.", - "post_id": "500", - "date": "November 13, 2010" - }, - { - "category": "general", - "medium": "bitcointalk", - "text": "I wish rather than deleting the article, they put a length restriction. If something is not famous enough, there could at least be a stub article identifying what it is. I often come across annoying red links of things that Wiki ought to at least have heard of. \nThe article could be as simple as something like: \"Bitcoin is a peer-to-peer decentralised /link/electronic currency/link/.\" \nThe more standard Wiki thing to do is that we should have a paragraph in one of the more general categories that we are an instance of, like Electronic Currency or Electronic Cash. We can probably establish a paragraph there. Again, keep it short. Just identifying what it is.", - "post_id": "467", - "date": "September 30, 2010" - }, - { - "category": "transactions", - "medium": "bitcointalk", - "text": "As you figured out, the root problem is we shouldn't be counting or spending transactions until they have at least 1 confirmation. 0/unconfirmed transactions are very much second class citizens. At most, they are advice that something has been received, but counting them as balance or spending them is premature.", - "post_id": "464", - "date": "September 30, 2010" - }, - { - "category": "general", - "medium": "bitcointalk", - "text": "Bitcoin would be convenient for people who don't have a credit card or don't want to use the cards they have, either don't want the spouse to see it on the bill or don't trust giving their number to \"porn guys\", or afraid of recurring billing.", - "post_id": "460", - "date": "September 23, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "I don't know anything about any of the bug trackers. If we were to have one, we would have to make a thoroughly researched choice. We're managing pretty well just using the forum. 
I'm more likely to see bugs posted in the forum, and I think other users are much more likely to help resolve and ask follow up questions here than if they were in a bug tracker. A key step is other users helping resolve the simple stuff that's not really a bug but some misunderstanding or confusion. I keep a list of all unresolved bugs I've seen on the forum. In some cases, I'm still thinking about the best design for the fix. This isn't the kind of software where we can leave so many unresolved bugs that we need a tracker for them.", - "post_id": "454", - "date": "September 19, 2010" - }, - { - "category": "scalability", - "medium": "bitcointalk", - "text": "The threshold can easily be changed in the future. We can decide to increase it when the time comes. It's a good idea to keep it lower as a circuit breaker and increase it as needed. If we hit the threshold now, it would almost certainly be some kind of flood and not actual use. Keeping the threshold lower would help limit the amount of wasted disk space in that event.", - "post_id": "441", - "date": "September 8, 2010" - }, - { - "category": "fees", - "medium": "bitcointalk", - "text": "Currently, paying a fee is controlled manually with the -paytxfee switch. It would be very easy to make the software automatically check the size of recent blocks to see if it should pay a fee. We're so far from reaching the threshold, we don't need that yet. It's a good idea to see how things go with controlling it manually first anyway.", - "post_id": "441", - "date": "September 8, 2010" - }, - { - "category": "fees, nodes", - "medium": "bitcointalk", - "text": "Another option is to reduce the number of free transactions allowed per block before transaction fees are required. Nodes only take so many KB of free transactions per block before they start requiring at least 0.01 transaction fee. The threshold should probably be lower than it currently is. I don't think the threshold should ever be 0. 
We should always allow at least some free transactions.", - "post_id": "439", - "date": "September 7, 2010" - }, - { - "category": "economics", - "medium": "bitcointalk", - "text": "As a thought experiment, imagine there was a base metal as scarce as gold but with the following properties:\n- boring grey in colour\n- not a good conductor of electricity\n- not particularly strong, but not ductile or easily malleable either\n- not useful for any practical or ornamental purpose\n\nand one special, magical property:\n- can be transported over a communications channel\n\nIf it somehow acquired any value at all for whatever reason, then anyone wanting to transfer wealth over a long distance could buy some, transmit it, and have the recipient sell it.\n\nMaybe it could get an initial value circularly as you've suggested, by people foreseeing its potential usefulness for exchange. (I would definitely want some) Maybe collectors, any random reason could spark it.\n\nI think the traditional qualifications for money were written with the assumption that there are so many competing objects in the world that are scarce, an object with the automatic bootstrap of intrinsic value will surely win out over those without intrinsic value. But if there were nothing in the world with intrinsic value that could be used as money, only scarce but no intrinsic value, I think people would still take up something.\n\n(I'm using the word scarce here to only mean limited potential supply)", - "post_id": "428", - "date": "August 27, 2010" - }, { "category": "bitcoin-economics", "medium": "bitcointalk", @@ -83,104 +13,6 @@ "post_id": "427", "date": "August 27, 2010" }, - { - "category": "proof-of-work", - "medium": "bitcointalk", - "text": "There is no way for the software to automatically know if one chain is better than another except by the greatest proof-of-work. 
In the design it was necessary for it to switch to a longer chain no matter how far back it has to go.", - "post_id": "394", - "date": "August 16, 2010" - }, - { - "category": "mining", - "medium": "bitcointalk", - "text": "Some places where generation will gravitate to: \n1) places where it's cheapest or free\n2) people who want to help for idealogical reasons\n3) people who want to get some coins without the inconvenience of doing a transaction to buy them\n\nThere are legitimate places where it's free. Generation is basically free anywhere that has electric heat, since your computer's heat is offsetting your baseboard electric heating. Many small flats have electric heat out of convenience.", - "post_id": "364", - "date": "August 15, 2010" - }, - { - "category": "general", - "medium": "bitcointalk", - "text": "Then you must also be against the common system of payment up front, where the customer loses.\nPayment up front: customer loses, and the thief gets the money.\nSimple escrow: customer loses, but the thief doesn't get the money either.\nAre you guys saying payment up front is better, because at least the thief gets the money, so at least someone gets it?\nImagine someone stole something from you. You can't get it back, but if you could, if it had a kill switch that could be remote triggered, would you do it? Would it be a good thing for thieves to know that everything you own has a kill switch and if they steal it, it'll be useless to them, although you still lose it too? If they give it back, you can re-activate it.\nImagine if gold turned to lead when stolen. If the thief gives it back, it turns to gold again.\nIt still seems to me the problem may be one of presenting it the right way. For one thing, not being so blunt about \"money burning\" for the purposes of game theory discussion. The money is never truly burned. 
You have the option to release it at any time forever.", - "post_id": "340", - "date": "August 11, 2010" - }, - { - "category": "mining", - "medium": "bitcointalk", - "text": "The heat from your computer is not wasted if you need to heat your home. If you're using electric heat where you live, then your computer's heat isn't a waste. It's equal cost if you generate the heat with your computer. \nIf you have other cheaper heating than electric, then the waste is only the difference in cost.\nIf it's summer and you're using A/C, then it's twice. \nBitcoin generation should end up where it's cheapest. Maybe that will be in cold climates where there's electric heat, where it would be essentially free.", - "post_id": "337", - "date": "August 9, 2010" - }, - { - "category": "bitcoin-economics", - "medium": "bitcointalk", - "text": "It's the same situation as gold and gold mining. The marginal cost of gold mining tends to stay near the price of gold. Gold mining is a waste, but that waste is far less than the utility of having gold available as a medium of exchange. \nI think the case will be the same for Bitcoin. The utility of the exchanges made possible by Bitcoin will far exceed the cost of electricity used. Therefore, not having Bitcoin would be the net waste.", - "post_id": "327", - "date": "August 7, 2010" - }, - { - "category": "proof-of-work", - "medium": "bitcointalk", - "text": "Proof-of-work has the nice property that it can be relayed through untrusted middlemen. We don't have to worry about a chain of custody of communication. It doesn't matter who tells you a longest chain, the proof-of-work speaks for itself.", - "post_id": "327", - "date": "August 7, 2010" - }, - { - "category": "micropayments", - "medium": "bitcointalk", - "text": "Forgot to add the good part about micropayments. While I don't think Bitcoin is practical for smaller micropayments right now, it will eventually be as storage and bandwidth costs continue to fall. 
If Bitcoin catches on on a big scale, it may already be the case by that time. Another way they can become more practical is if I implement client-only mode and the number of network nodes consolidates into a smaller number of professional server farms. Whatever size micropayments you need will eventually be practical. I think in 5 or 10 years, the bandwidth and storage will seem trivial.", - "post_id": "318", - "date": "August 5, 2010" - }, - { - "category": "micropayments", - "medium": "bitcointalk", - "text": "Bitcoin isn't currently practical for very small micropayments. Not for things like pay per search or per page view without an aggregating mechanism, not things needing to pay less than 0.01. The dust spam limit is a first try at intentionally trying to prevent overly small micropayments like that. \nBitcoin is practical for smaller transactions than are practical with existing payment methods. Small enough to include what you might call the top of the micropayment range. But it doesn't claim to be practical for arbitrarily small micropayments.", - "post_id": "317", - "date": "August 4, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "Actually, it works well to just PM me. I'm the one who's going to be fixing it. If you find a security flaw, I would definitely like to hear from you privately to fix it before it goes public.", - "post_id": "294", - "date": "July 29, 2010" - }, - { - "category": "nodes", - "medium": "bitcointalk", - "text": "The current system where every user is a network node is not the intended configuration for large scale. That would be like every Usenet user runs their own NNTP server. The design supports letting users just be users. The more burden it is to run a node, the fewer nodes there will be. Those few nodes will be big server farms. 
The rest will be client nodes that only do transactions and don't generate.", - "post_id": "287", - "date": "July 29, 2010" - }, - { - "category": "general", - "medium": "bitcointalk", - "text": "For future reference, here's my public key. It's the same one that's been there since the bitcoin.org site first went up in 2008. Grab it now in case you need it later. http://www.bitcoin.org/Satoshi_Nakamoto.asc", - "post_id": "276", - "date": "July 25, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "By making some adjustments to the database settings, I was able to make the initial block download about 5 times faster. It downloads in about 30 minutes. \n \nThe database default had it writing each block to disk synchronously, which is not necessary. I changed the settings to let it cache the changes in memory and write them out in a batch. Blocks are still written transactionally, so either the complete change occurs or none of it does, in either case the data is left in a valid state. \n \nI only enabled this change during the initial block download. When you come within 2000 blocks of the latest block, these changes turn off and it slows down to the old way.", - "post_id": "258", - "date": "July 23, 2010" - }, - { - "category": "general", - "medium": "bitcointalk", - "text": "The timing is strange, just as we are getting a rapid increase in 3rd party coverage after getting slashdotted. I hope there's not a big hurry to wrap the discussion and decide. How long does Wikipedia typically leave a question like that open for comment? \nIt would help to condense the article and make it less promotional sounding as soon as possible. Just letting people know what it is, where it fits into the electronic money space, not trying to convince them that it's good. 
They probably want something that just generally identifies what it is, not tries to explain all about how it works.", - "post_id": "249", - "date": "July 10, 2010" - }, - { - "category": "difficulty", - "medium": "bitcointalk", - "text": "Right, the difficulty adjustment is trying to keep it so the network as a whole generates an average of 6 blocks per hour. The time for your block to mature will always be around 20 hours.", - "post_id": "225", - "date": "July 16, 2010" - }, { "category": "difficulty", "medium": "bitcointalk", @@ -188,20 +20,6 @@ "post_id": "223", "date": "July 16, 2010" }, - { - "category": "scalability, nodes", - "medium": "bitcointalk", - "text": "The design outlines a lightweight client that does not need the full block chain. In the design PDF it's called Simplified Payment Verification. The lightweight client can send and receive transactions, it just can't generate blocks. It does not need to trust a node to verify payments, it can still verify them itself. \nThe lightweight client is not implemented yet, but the plan is to implement it when it's needed. For now, everyone just runs a full network node.", - "post_id": "188", - "date": "July 14, 2010" - }, - { - "category": "scalability, nodes", - "medium": "bitcointalk", - "text": "I anticipate there will never be more than 100K nodes, probably less. It will reach an equilibrium where it's not worth it for more nodes to join in. The rest will be lightweight clients, which could be millions.", - "post_id": "188", - "date": "July 14, 2010" - }, { "category": "nodes", "medium": "bitcointalk", @@ -209,20 +27,6 @@ "post_id": "188", "date": "July 14, 2010" }, - { - "category": "economics", - "medium": "bitcointalk", - "text": "When someone tries to buy all the world's supply of a scarce asset, the more they buy the higher the price goes. At some point, it gets too expensive for them to buy any more. 
It's great for the people who owned it beforehand because they get to sell it to the corner at crazy high prices. As the price keeps going up and up, some people keep holding out for yet higher prices and refuse to sell.", - "post_id": "174", - "date": "July 9, 2010" - }, - { - "category": "releases", - "medium": "bitcointalk", - "text": "Announcing version 0.3 of Bitcoin, the P2P cryptocurrency! Bitcoin is a digital currency using cryptography and a distributed network to replace the need for a trusted central server. Escape the arbitrary inflation risk of centrally managed currencies! Bitcoin's total circulation is limited to 21 million coins. The coins are gradually released to the network's nodes based on the CPU proof-of-worker they contribute, so you can get a share of them by contributing your idle CPU time.", - "post_id": "168", - "date": "July 6, 2010" - }, { "category": "general", "medium": "bitcointalk", @@ -237,34 +41,6 @@ "post_id": "131", "date": "June 21, 2010" }, - { - "category": "general", - "medium": "bitcointalk", - "text": "Excellent choice of a first project, nice work. I had planned to do this exact thing if someone else didn't do it, so when it gets too hard for mortals to generate 50BTC, new users could get some coins to play with right away. Donations should be able to keep it filled. The display showing the balance in the dispenser encourages people to top it up.\n\nYou should put a donation bitcoin address on the page for those who want to add funds to it, which ideally should update to a new address whenever it receives something.", - "post_id": "129", - "date": "June 18, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "Since 2007. At some point I became convinced there was a way to do this without any trust required at all and couldn't resist to keep thinking about it. 
Much more of the work was designing than coding.\n\nFortunately, so far all the issues raised have been things I previously considered and planned for.", - "post_id": "127", - "date": "June 18, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "The nature of Bitcoin is such that once version 0.1 was released, the core design was set in stone for the rest of its lifetime. Because of that, I wanted to design it to support every possible transaction type I could think of. The problem was, each thing required special support code and data fields whether it was used or not, and only covered one special case at a time. It would have been an explosion of special cases. The solution was script, which generalizes the problem so transacting parties can describe their transaction as a predicate that the node network evaluates. The nodes only need to understand the transaction to the extent of evaluating whether the sender's conditions are met.", - "post_id": "126", - "date": "June 17, 2010" - }, - { - "category": "transactions, bitcoin-design", - "medium": "bitcointalk", - "text": "The design supports a tremendous variety of possible transaction types that I designed years ago. Escrow transactions, bonded contracts, third party arbitration, multi-party signature, etc. 
If Bitcoin catches on in a big way, these are things we'll want to explore in the future, but they all had to be designed at the beginning to make sure they would be possible later.", - "post_id": "126", - "date": "June 17, 2010" - }, { "category": "encryption", "medium": "bitcointalk", @@ -272,13 +48,6 @@ "post_id": "119", "date": "June 14, 2010" }, - { - "category": "encryption", - "medium": "bitcointalk", - "text": "If SHA-256 became completely broken, I think we could come to some agreement about what the honest block chain was before the trouble started, lock that in and continue from there with a new hash function.", - "post_id": "119", - "date": "June 14, 2010" - }, { "category": "releases", "medium": "bitcointalk", @@ -286,34 +55,6 @@ "post_id": "111", "date": "May 26, 2010" }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "Simplified Payment Verification is for lightweight client-only users who only do transactions and don't generate and don't participate in the node network. They wouldn't need to download blocks, just the hash chain, which is currently about 2MB and very quick to verify (less than a second to verify the whole chain). If the network becomes very large, like over 100,000 nodes, this is what we'll use to allow common users to do transactions without being full blown nodes. At that stage, most users should start running client-only software and only the specialist server farms keep running full network nodes, kind of like how the usenet network has consolidated. \nSPV is not implemented yet, and won't be implemented until far in the future, but all the current implementation is designed around supporting it.", - "post_id": "105", - "date": "May 18, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "Bitcoin addresses you generate are kept forever. A bitcoin address must be kept to show ownership of anything sent to it. 
If you were able to delete a bitcoin address and someone sent to it, the money would be lost. They're only about 500 bytes.", - "post_id": "102", - "date": "May 16, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "When you generate a new bitcoin address, it only takes disk space on your own computer (like 500 bytes). It's like generating a new PGP private key, but less CPU intensive because it's ECC. The address space is effectively unlimited. It doesn't hurt anyone, so generate all you want.", - "post_id": "98", - "date": "May 16, 2010" - }, - { - "category": "general", - "medium": "bitcointalk", - "text": "The price of .com registrations is lower than it should be, therefore any good name you might think of is always already taken by some domain name speculator. Fortunately, it's standard for open source projects to be .org.", - "post_id": "94", - "date": "March 23, 2010" - }, { "category": "bitcoin-design", "medium": "bitcointalk", @@ -328,20 +69,6 @@ "post_id": "73", "date": "February 24, 2010" }, - { - "category": "economics", - "medium": "bitcointalk", - "text": "A rational market price for something that is expected to increase in value will already reflect the present value of the expected future increases. In your head, you do a probability estimate balancing the odds that it keeps increasing.", - "post_id": "65", - "date": "February 21, 2010" - }, - { - "category": "economics, bitcoin-economics", - "medium": "bitcointalk", - "text": "The price of any commodity tends to gravitate toward the production cost. If the price is below cost, then production slows down. If the price is above cost, profit can be made by generating and selling more. 
At the same time, the increased production would increase the difficulty, pushing the cost of generating towards the price.", - "post_id": "65", - "date": "February 21, 2010" - }, { "category": "bitcoin-economics", "medium": "bitcointalk", @@ -370,20 +97,6 @@ "post_id": "56", "date": "February 14, 2010" }, - { - "category": "bitcoin-economics, bitcoin-design", - "medium": "bitcointalk", - "text": "Eventually at most only 21 million coins for 6.8 billion people in the world if it really gets huge.\n\nBut don't worry, there are another 6 decimal places that aren't shown, for a total of 8 decimal places internally. It shows 1.00 but internally it's 1.00000000. If there's massive deflation in the future, the software could show more decimal places.", - "post_id": "46", - "date": "February 6, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "If it gets tiresome working with small numbers, we could change where the display shows the decimal point. Same amount of money, just different convention for where the \",\"'s and \".\"'s go. e.g. moving the decimal place 3 places would mean if you had 1.00000 before, now it shows it as 1,000.00.", - "post_id": "46", - "date": "February 6, 2010" - }, { "category": "privacy", "medium": "bitcointalk", @@ -398,41 +111,6 @@ "post_id": "45", "date": "February 6, 2010" }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "I very much wanted to find some way to include a short message, but the problem is, the whole world would be able to see the message. As much as you may keep reminding people that the message is completely non-private, it would be an accident waiting to happen.", - "post_id": "33", - "date": "January 28, 2010" - }, - { - "category": "mining", - "medium": "bitcointalk", - "text": "The average total coins generated across the network per day stays the same. Faster machines just get a larger share than slower machines. 
If everyone bought faster machines, they wouldn't get more coins than before.", - "post_id": "20", - "date": "December 12, 2009" - }, - { - "category": "mining", - "medium": "bitcointalk", - "text": "We should have a gentleman's agreement to postpone the GPU arms race as long as we can for the good of the network. It's much easer to get new users up to speed if they don't have to worry about GPU drivers and compatibility. It's nice how anyone with just a CPU can compete fairly equally right now.", - "post_id": "20", - "date": "December 12, 2009" - }, - { - "category": "bitcoin-economics", - "medium": "bitcointalk", - "text": "Those coins can never be recovered, and the total circulation is less. Since the effective circulation is reduced, all the remaining coins are worth slightly more. It's the opposite of when a government prints money and the value of existing money goes down.", - "post_id": "17", - "date": "December 10, 2009" - }, - { - "category": "trusted-third-parties", - "text": "Being open source means anyone can independently review the code. If it was closed source, nobody could verify the security. I think it's essential for a program of this nature to be open source.", - "medium": "bitcointalk", - "post_id": "17", - "date": "December 10, 2009" - }, { "category": "privacy, transactions", "medium": "bitcointalk", @@ -440,20 +118,6 @@ "post_id": "11", "date": "November 25, 2009" }, - { - "category": "mining", - "medium": "bitcointalk", - "text": "Think of it as a cooperative effort to make a chain. When you add a link, you must first find the current end of the chain. 
If you were to locate the last link, then go off for an hour and forge your link, come back and link it to the link that was the end an hour ago, others may have added several links since then and they're not going to want to use your link that now branches off the middle.", - "post_id": "8", - "date": "November 22, 2009" - }, - { - "category": "bitcoin-design", - "medium": "p2pfoundation", - "text": "It is a global distributed database, with additions to the database by consent of the majority, based on a set of rules they follow: \n\n- Whenever someone finds proof-of-work to generate a block, they get some new coins\n- The proof-of-work difficulty is adjusted every two weeks to target an average of 6 blocks per hour (for the whole network)\n- The coins given per block is cut in half every 4 years", - "post_id": "3", - "date": "February 18, 2009" - }, { "category": "bitcoin-economics", "medium": "p2pfoundation", @@ -461,55 +125,6 @@ "post_id": "3", "date": "February 18, 2009" }, - { - "category": "bitcoin-economics", - "medium": "p2pfoundation", - "text": "To Sepp's question, indeed there is nobody to act as central bank or federal reserve to adjust the money supply as the population of users grows. That would have required a trusted party to determine the value, because I don't know a way for software to know the real world value of things.", - "post_id": "3", - "date": "February 18, 2009" - }, - { - "category": "bitcoin-economics", - "medium": "p2pfoundation", - "text": "In this sense, it's more typical of a precious metal. Instead of the supply changing to keep the value the same, the supply is predetermined and the value changes. As the number of users grows, the value per coin increases. 
It has the potential for a positive feedback loop; as users increase, the value goes up, which could attract more users to take advantage of the increasing value.", - "post_id": "3", - "date": "February 18, 2009" - }, - { - "category": "cryptocurrency", - "medium": "p2pfoundation", - "text": "A lot of people automatically dismiss e-currency as a lost cause because of all the companies that failed since the 1990's. I hope it's obvious it was only the centrally controlled nature of those systems that doomed them. I think this is the first time we're trying a decentralized, non-trust-based system.", - "post_id": "2", - "date": "February 15, 2009" - }, - { - "category": "releases, bitcoin-design", - "medium": "p2pfoundation", - "text": "I've developed a new open source P2P e-cash system called Bitcoin. It's completely decentralized, with no central server or trusted parties, because everything is based on crypto proof instead of trust. Give it a try, or take a look at the screenshots and design paper: \n\nDownload Bitcoin v0.1 at http://www.bitcoin.org", - "post_id": "1", - "date": "February 11, 2009" - }, - { - "category": "economics", - "medium": "p2pfoundation", - "text": "The root problem with conventional currency is all the trust that's required to make it work. The central bank must be trusted not to debase the currency, but the history of fiat currencies is full of breaches of that trust.", - "post_id": "1", - "date": "February 11, 2009" - }, - { - "category": "micropayments, privacy, banks", - "medium": "p2pfoundation", - "text": "Banks must be trusted to hold our money and transfer it electronically, but they lend it out in waves of credit bubbles with barely a fraction in reserve. We have to trust them with our privacy, trust them not to let identity thieves drain our accounts. 
Their massive overhead costs make micropayments impossible.", - "post_id": "1", - "date": "February 11, 2009" - }, - { - "category": "encryption", - "medium": "p2pfoundation", - "text": "A generation ago, multi-user time-sharing computer systems had a similar problem. Before strong encryption, users had to rely on password protection to secure their files, placing trust in the system administrator to keep their information private. Privacy could always be overridden by the admin based on his judgment call weighing the principle of privacy against other concerns, or at the behest of his superiors. Then strong encryption became available to the masses, and trust was no longer required. Data could be secured in a way that was physically impossible for others to access, no matter for what reason, no matter how good the excuse, no matter what.", - "post_id": "1", - "date": "February 11, 2009" - }, { "category": "cryptocurrency", "medium": "p2pfoundation", @@ -517,27 +132,6 @@ "post_id": "1", "date": "February 11, 2009" }, - { - "category": "transactions", - "medium": "p2pfoundation", - "text": "A digital coin contains the public key of its owner. To transfer it, the owner signs the coin together with the public key of the next owner. Anyone can check the signatures to verify the chain of ownership.", - "post_id": "1", - "date": "February 11, 2009" - }, - { - "category": "double-spending", - "medium": "p2pfoundation", - "text": "Any owner could try to re-spend an already spent coin by signing it again to another owner. The usual solution is for a trusted company with a central database to check for double-spending, but that just gets back to the trust model. In its central position, the company can override the users, and the fees needed to support the company make micropayments impractical. \nBitcoin's solution is to use a peer-to-peer network to check for double-spending. 
In a nutshell, the network works like a distributed timestamp server, stamping the first transaction to spend a coin. It takes advantage of the nature of information being easy to spread but hard to stifle.", - "post_id": "1", - "date": "February 11, 2009" - }, - { - "category": "bitcoin-design", - "medium": "p2pfoundation", - "text": "The result is a distributed system with no single point of failure. Users hold the crypto keys to their own money and transact directly with each other, with the help of the P2P network to check for double-spending.", - "post_id": "1", - "date": "February 11, 2009" - }, { "category": "identity", "medium": "p2pfoundation", @@ -552,20 +146,6 @@ "text": "I've been working on a new electronic cash system that's fully peer-to-peer, with no trusted third party.", "date": "November 1, 2008" }, - { - "category": "bitcoin-design", - "medium": "email", - "email_id": "1", - "text": "The main properties: \n Double-spending is prevented with a peer-to-peer network.\n No mint or other trusted parties.\n Participants can be anonymous.\n New coins are made from Hashcash style proof-of-work.\n The proof-of-work for new coin generation also proof-of-workers the network to prevent double-spending.", - "date": "November 1, 2008" - }, - { - "category": "double-spending", - "medium": "email", - "email_id": "2", - "text": "Long before the network gets anywhere near as large as that, it would be safe for users to use Simplified Payment Verification (section 8) to check for double spending, which only requires having the chain of block headers, or about 12KB per day.", - "date": "November 2, 2008" - }, { "category": "nodes", "medium": "email", @@ -573,13 +153,6 @@ "text": "Only people trying to create new coins would need to run network nodes.", "date": "November 2, 2008" }, - { - "category": "nodes", - "medium": "email", - "email_id": "2", - "text": "At first, most users would run network nodes, but as the network grows beyond a certain point, it would be 
left more and more to specialists with server farms of specialized hardware. A server farm would only need to have one node on the network and the rest of the LAN connects with that one node.", - "date": "November 2, 2008" - }, { "category": "mining", "medium": "email", @@ -587,20 +160,6 @@ "text": "The requirement is that the good guys collectively have more CPU proof-of-worker than any single attacker.", "date": "November 3, 2008" }, - { - "category": "mining", - "medium": "email", - "email_id": "3", - "text": "There would be many smaller zombie farms that are not big enough to overproof-of-worker the network, and they could still make money by generating bitcoins. The smaller farms are then the \"honest nodes\". (I need a better term than \"honest\") The more smaller farms resort to generating bitcoins, the higher the bar gets to overproof-of-worker the network, making larger farms also too small to overproof-of-worker it so that they may as well generate bitcoins too. According to the \"long tail\" theory, the small, medium and merely large farms put together should add up to a lot more than the biggest zombie farm.", - "date": "November 3, 2008" - }, - { - "category": "mining", - "medium": "email", - "email_id": "3", - "text": "Even if a bad guy does overproof-of-worker the network, it's not like he's instantly rich. All he can accomplish is to take back money he himself spent, like bouncing a check. To exploit it, he would have to buy something from a merchant, wait till it ships, then overproof-of-worker the network and try to take his money back. I don't think he could make as much money trying to pull a carding scheme like that as he could by generating bitcoins. 
With a zombie farm that big, he could generate more bitcoins than everyone else combined.", - "date": "November 3, 2008" - }, { "category": "mining", "medium": "email", @@ -622,62 +181,6 @@ "text": "Governments are good at cutting off the heads of a centrally controlled networks like Napster, but pure P2P networks like Gnutella and Tor seem to be holding their own.", "date": "November 7, 2008" }, - { - "category": "mining, difficulty", - "medium": "email", - "email_id": "5", - "text": "As computers get faster and the total computing proof-of-worker applied to creating bitcoins increases, the difficulty increases proportionally to keep the total new production constant. Thus, it is known in advance how many new bitcoins will be created every year in the future.", - "date": "November 8, 2008" - }, - { - "category": "bitcoin-economics", - "medium": "email", - "email_id": "5", - "text": "The fact that new coins are produced means the money supply increases by a planned amount, but this does not necessarily result in inflation. If the supply of money increases at the same rate that the number of people using it increases, prices remain stable. If it does not increase as fast as demand, there will be deflation and early holders of money will see its value increase. Coins have to get initially distributed somehow, and a constant rate seems like the best formula.", - "date": "November 8, 2008" - }, - { - "category": "nodes", - "medium": "email", - "email_id": "6", - "text": "Right, nodes keep transactions in their working set until they get into a block. If a transaction reaches 90% of nodes, then each time a new block is found, it has a 90% chance of being in it.", - "date": "November 9, 2008" - }, - { - "category": "transactions", - "medium": "email", - "email_id": "6", - "text": "Receivers of transactions will normally need to hold transactions for perhaps an hour or more to allow time for this kind of possibility to be resolved. 
They can still re-spend the coins immediately, but they should wait before taking an action such as shipping goods.", - "date": "November 9, 2008" - }, - { - "category": "double-spending", - "medium": "email", - "email_id": "6", - "text": "The attacker isn't adding blocks to the end. He has to go back and redo the block his transaction is in and all the blocks after it, as well as any new blocks the network keeps adding to the end while he's doing that. He's rewriting history. Once his branch is longer, it becomes the new valid one.", - "date": "November 9, 2008" - }, - { - "category": "nodes, mining, proof-of-work", - "medium": "email", - "email_id": "6", - "text": "It is strictly necessary that the longest chain is always considered the valid one. Nodes that were present may remember that one branch was there first and got replaced by another, but there would be no way for them to convince those who were not present of this. We can't have subfactions of nodes that cling to one branch that they think was first, others that saw another branch first, and others that joined later and never saw what happened. The CPU proof-of-worker proof-of-work vote must have the final say. The only way for everyone to stay on the same page is to believe that the longest chain is always the valid one, no matter what.", - "date": "November 9, 2008" - }, - { - "category": "transactions", - "medium": "email", - "email_id": "6", - "text": "The recipient just needs to verify it back to a depth that is sufficiently far back in the block chain, which will often only require a depth of 2 transactions. All transactions before that can be discarded.", - "date": "November 9, 2008" - }, - { - "category": "nodes", - "medium": "email", - "email_id": "6", - "text": "When a node receives a block, it checks the signatures of every transaction in it against previous transactions in blocks. Blocks can only contain transactions that depend on valid transactions in previous blocks or the same block. 
Transaction C could depend on transaction B in the same block and B depends on transaction A in an earlier block.", - "date": "November 9, 2008" - }, { "category": "transactions", "medium": "email", @@ -692,34 +195,6 @@ "text": "The proof-of-work chain is the solution to the synchronisation problem, and to knowing what the globally shared view is without having to trust anyone.", "date": "November 9, 2008" }, - { - "category": "nodes", - "medium": "email", - "email_id": "8", - "text": "A transaction will quickly propagate throughout the network, so if two versions of the same transaction were reported at close to the same time, the one with the head start would have a big advantage in reaching many more nodes first. Nodes will only accept the first one they see, refusing the second one to arrive, so the earlier transaction would have many more nodes working on incorporating it into the next proof-of-work. In effect, each node votes for its viewpoint of which transaction it saw first by including it in its proof-of-work effort. If the transactions did come at exactly the same time and there was an even split, it's a toss up based on which gets into a proof-of-work first, and that decides which is valid.", - "date": "November 9, 2008" - }, - { - "category": "nodes, proof-of-work", - "medium": "email", - "email_id": "8", - "text": "When a node finds a proof-of-work, the new block is propagated throughout the network and everyone adds it to the chain and starts working on the next block after it. Any nodes that had the other transaction will stop trying to include it in a block, since it's now invalid according to the accepted chain.", - "date": "November 9, 2008" - }, - { - "category": "proof-of-work", - "medium": "email", - "email_id": "8", - "text": "The proof-of-work chain is itself self-evident proof that it came from the globally shared view. 
Only the majority of the network together has enough CPU proof-of-worker to generate such a difficult chain of proof-of-work. Any user, upon receiving the proof-of-work chain, can see what the majority of the network has approved. Once a transaction is hashed into a link that's a few links back in the chain, it is firmly etched into the global history.", - "date": "November 9, 2008" - }, - { - "category": "fees, bitcoin-economics", - "medium": "email", - "email_id": "9", - "text": "If you're having trouble with the inflation issue, it's easy to tweak it for transaction fees instead. It's as simple as this: let the output value from any transaction be 1 cent less than the input value. Either the client software automatically writes transactions for 1 cent more than the intended payment value, or it could come out of the payee's side. The incentive value when a node finds a proof-of-work for a block could be the total of the fees in the block.", - "date": "November 10, 2008" - }, { "category": "double-spending", "medium": "email", @@ -734,27 +209,6 @@ "text": "The receiver of a payment must wait an hour or so before believing that it's valid. The network will resolve any possible double-spend races by then.", "date": "November 11, 2008" }, - { - "category": "double-spending", - "medium": "email", - "email_id": "10", - "text": "The guy who received the double-spend that became invalid never thought he had it in the first place. His software would have shown the transaction go from \"unconfirmed\" to \"invalid\". If necessary, the UI can be made to hide transactions until they're sufficiently deep in the block chain.", - "date": "November 11, 2008" - }, - { - "category": "difficulty", - "medium": "email", - "email_id": "10", - "text": "The target time between blocks will probably be 10 minutes. Every block includes its creation time. If the time is off by more than 36 hours, other nodes won't work on it. 
If the timespan over the last 6*24*30 blocks is less than 15 days, blocks are being generated too fast and the proof-of-work difficulty doubles. Everyone does the same calculation with the same chain data, so they all get the same result at the same link in the chain.", - "date": "November 11, 2008" - }, - { - "category": "transactions", - "medium": "email", - "email_id": "10", - "text": "Instantant non-repudiability is not a feature, but it's still much faster than existing systems. Paper cheques can bounce up to a week or two later. Credit card transactions can be contested up to 60 to 180 days later. Bitcoin transactions can be sufficiently irreversible in an hour or two.", - "date": "November 11, 2008" - }, { "category": "nodes", "medium": "email", @@ -762,20 +216,6 @@ "text": "With the transaction fee based incentive system I recently posted, nodes would have an incentive to include all the paying transactions they receive.", "date": "November 11, 2008" }, - { - "category": "proof-of-work", - "medium": "email", - "email_id": "11", - "text": "The proof-of-work chain is a solution to the Byzantine Generals' Problem. I'll try to rephrase it in that context.\nA number of Byzantine Generals each have a computer and want to attack the King's wi-fi by brute forcing the password, which they've learned is a certain number of characters in length. Once they stimulate the network to generate a packet, they must crack the password within a limited time to break in and erase the logs, otherwise they will be discovered and get in trouble. They only have enough CPU proof-of-worker to crack it fast enough if a majority of them attack at the same time. \n They don't particularly care when the attack will be, just that they all agree. It has been decided that anyone who feels like it will announce a time, and whatever time is heard first will be the official attack time. 
The problem is that the network is not instantaneous, and if two generals announce different attack times at close to the same time, some may hear one first and others hear the other first. They use a proof-of-work chain to solve the problem. Once each general receives whatever attack time he hears first, he sets his computer to solve an extremely difficult proof-of-work problem that includes the attack time in its hash. The proof-of-work is so difficult, it's expected to take 10 minutes of them all working at once before one of them finds a solution. Once one of the generals finds a proof-of-work, he broadcasts it to the network, and everyone changes their current proof-of-work computation to include that proof-of-work in the hash they're working on. If anyone was working on a different attack time, they switch to this one, because its proof-of-work chain is now longer.\n After two hours, one attack time should be hashed by a chain of 12 proofs-of-work. Every general, just by verifying the difficulty of the proof-of-work chain, can estimate how much parallel CPU proof-of-worker per hour was expended on it and see that it must have required the majority of the computers to produce that much proof-of-work in the allotted time. They had to all have seen it because the proof-of-work is proof that they worked on it. If the CPU proof-of-worker exhibited by the proof-of-work chain is sufficient to crack the password, they can safely attack at the agreed time.\n The proof-of-work chain is how all the synchronisation, distributed database and global view problems you've asked about are solved.", - "date": "November 13, 2008" - }, - { - "category": "nodes, mining", - "medium": "email", - "email_id": "12", - "text": "Broadcasts will probably be almost completely reliable. TCP transmissions are rarely ever dropped these days, and the broadcast protocol has a retry mechanism to get the data from other nodes after a while. 
If broadcasts turn out to be slower in practice than expected, the target time between blocks may have to be increased to avoid wasting resources. We want blocks to usually propagate in much less time than it takes to generate them, otherwise nodes would spend too much time working on obsolete blocks.", - "date": "November 14, 2008" - }, { "category": "motives", "medium": "email", @@ -790,41 +230,6 @@ "text": "I'll try and hurry up and release the sourcecode as soon as possible to serve as a reference to help clear up all these implementation questions.", "date": "November 17, 2008" }, - { - "category": "transactions", - "medium": "email", - "email_id": "13", - "text": "A basic transaction is just what you see in the figure in section 2. A signature (of the buyer) satisfying the public key of the previous transaction, and a new public key (of the seller) that must be satisfied to spend it the next time.", - "date": "November 17, 2008" - }, - { - "category": "double-spending", - "medium": "email", - "email_id": "13", - "text": "There's no need for reporting of \"proof of double spending\" like that. If the same chain contains both spends, then the block is invalid and rejected. \n Same if a block didn't have enough proof-of-work. That block is invalid and rejected. There's no need to circulate a report about it. Every node could see that and reject it before relaying it.", - "date": "November 17, 2008" - }, - { - "category": "double-spending", - "medium": "email", - "email_id": "13", - "text": "We're not \"on the lookout\" for double spends to sound the alarm and catch the cheater. We merely adjudicate which one of the spends is valid. Receivers of transactions must wait a few blocks to make sure that resolution has had time to complete. Would be cheaters can try and simultaneously double-spend all they want, and all they accomplish is that within a few blocks, one of the spends becomes valid and the others become invalid. 
Any later double-spends are immediately rejected once there's already a spend in the main chain.", - "date": "November 17, 2008" - }, - { - "category": "proof-of-work, mining", - "medium": "email", - "email_id": "13", - "text": "The proof-of-work is a Hashcash style SHA-256 collision finding. It's a memoryless process where you do millions of hashes a second, with a small chance of finding one each time. The 3 or 4 fastest nodes' dominance would only be proportional to their share of the total CPU proof-of-worker. Anyone's chance of finding a solution at any time is proportional to their CPU proof-of-worker.", - "date": "November 17, 2008" - }, - { - "category": "bitcoin-economics", - "medium": "email", - "email_id": "13", - "text": "There will be transaction fees, so nodes will have an incentive to receive and include all the transactions they can. Nodes will eventually be compensated by transaction fees alone when the total coins created hits the pre-determined ceiling.", - "date": "November 17, 2008" - }, { "category": "proof-of-work", "medium": "email", @@ -832,34 +237,6 @@ "text": "The credential that establishes someone as real is the ability to supply CPU proof-of-worker.", "date": "November 17, 2008" }, - { - "category": "double-spending", - "medium": "email", - "email_id": "14", - "text": "The race is to spread your transaction on the network first. Think 6 degrees of freedom -- it spreads exponentially. It would only take something like 2 minutes for a transaction to spread widely enough that a competitor starting late would have little chance of grabbing very many nodes before the first one is overtaking the whole network. During those 2 minutes, the merchant's nodes can be watching for a double-spent transaction. The double-spender would not be able to blast his alternate transaction out to the world without the merchant getting it, so he has to wait before starting. 
\n If the real transaction reaches 90% and the double-spent tx reaches 10%, the double-spender only gets a 10% chance of not paying, and 90% chance his money gets spent. For almost any type of goods, that's not going to be worth it for the scammer.", - "date": "November 17, 2008" - }, - { - "category": "transactions", - "medium": "email", - "email_id": "14", - "text": "If a merchant actually has a problem with theft, they can make the customer wait 2 minutes, or wait for something in e-mail, which many already do. If they really want to optimize, and it's a large download, they could cancel the download in the middle if the transaction comes back double-spent. If it's website access, typically it wouldn't be a big deal to let the customer have access for 5 minutes and then cut off access if it's rejected. Many such sites have a free trial anyway.", - "date": "November 17, 2008" - }, - { - "category": "releases, bitcoin-design", - "medium": "email", - "email_id": "15", - "text": "I believe I've worked through all those little details over the last year and a half while coding it, and there were a lot of them. The functional details are not covered in the paper, but the sourcecode is coming soon. I sent you the main files. (available by request at the moment, full release soon)", - "date": "November 17, 2008" - }, - { - "category": "releases", - "medium": "email", - "email_id": "16", - "text": "Announcing the first release of Bitcoin, a new electronic cash system that uses a peer-to-peer network to prevent double-spending. It's completely decentralized with no server or central authority.", - "date": "January 9, 2009" - }, { "category": "nodes", "medium": "email", @@ -867,41 +244,6 @@ "text": "If you can keep a node running that accepts incoming connections, you'll really be helping the network a lot. 
Port 8333 on your firewall needs to be open to receive incoming connections.", "date": "January 9, 2009" }, - { - "category": "mining", - "medium": "email", - "email_id": "16", - "text": "You can get coins by getting someone to send you some, or turn on Options->Generate Coins to run a node and generate blocks. I made the proof-of-work difficulty ridiculously easy to start with, so for a little while in the beginning a typical PC will be able to generate coins in just a few hours. It'll get a lot harder when competition makes the automatic adjustment drive up the difficulty. Generated coins must wait 120 blocks to mature before they can be spent.", - "date": "January 9, 2009" - }, - { - "category": "transactions", - "medium": "email", - "email_id": "16", - "text": "There are two ways to send money. If the recipient is online, you can enter their IP address and it will connect, get a new public key and send the transaction with comments. If the recipient is not online, it is possible to send to their Bitcoin address, which is a hash of their public key that they give you. They'll receive the transaction the next time they connect and get the block it's in. This method has the disadvantage that no comment information is sent, and a bit of privacy may be lost if the address is used multiple times, but it is a useful alternative if both users can't be online at the same time or the recipient can't receive incoming connections.", - "date": "January 9, 2009" - }, - { - "category": "bitcoin-economics", - "medium": "email", - "email_id": "16", - "text": "Total circulation will be 21,000,000 coins. It'll be distributed to network nodes when they make blocks, with the amount cut in half every 4 years.\n\nfirst 4 years: 10,500,000 coins\nnext 4 years: 5,250,000 coins\nnext 4 years: 2,625,000 coins\nnext 4 years: 1,312,500 coins\netc...\n\nWhen that runs out, the system can support transaction fees if needed. 
It's based on open market competition, and there will probably always be nodes willing to process transactions for free.", - "date": "January 9, 2009" - }, - { - "category": "cryptocurrency", - "medium": "email", - "email_id": "17", - "text": "I would be surprised if 10 years from now we're not using electronic currency in some way, now that we know a way to do it that won't inevitably get dumbed down when the trusted third party gets cold feet.", - "date": "January 17, 2009" - }, - { - "category": "micropayments", - "medium": "email", - "email_id": "17", - "text": "It can already be used for pay-to-send e-mail. The send dialog is resizeable and you can enter as long of a message as you like. It's sent directly when it connects. The recipient doubleclicks on the transaction to see the full message. If someone famous is getting more e-mail than they can read, but would still like to have a way for fans to contact them, they could set up Bitcoin and give out the IP address on their website. \"Send X bitcoins to my priority hotline at this IP and I'll read the message personally.\"", - "date": "January 17, 2009" - }, { "category": "micropayments", "medium": "email", @@ -909,191 +251,10 @@ "text": "Subscription sites that need some extra proof-of-work for their free trial so it doesn't cannibalize subscriptions could charge bitcoins for the trial.", "date": "January 17, 2009" }, - { - "category": "micropayments, bitcoin-economics", - "medium": "email", - "email_id": "17", - "text": "It might make sense just to get some in case it catches on. If enough people think the same way, that becomes a self fulfilling prophecy. 
Once it gets bootstrapped, there are so many applications if you could effortlessly pay a few cents to a website as easily as dropping coins in a vending machine.", - "date": "January 17, 2009" - }, { "category": "cryptocurrency", "text": "A purely peer-to-peer version of electronic cash would allow online payments to be sent directly from one party to another without going through a financial institution.", "medium": "whitepaper", "date": "October 31, 2008" - }, - { - "category": "proof-of-work, double-spending", - "text": "We propose a solution to the double-spending problem using a peer-to-peer network. The network timestamps transactions by hashing them into an ongoing chain of hash-based proof-of-work, forming a record that cannot be changed without redoing the proof-of-work. The longest chain not only serves as proof of the sequence of events witnessed, but proof that it came from the largest pool of CPU proof-of-worker. As long as a majority of CPU proof-of-worker is controlled by nodes that are not cooperating to attack the network, they'll generate the longest chain and outpace attackers. The network itself requires minimal structure.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "trusted-third-parties", - "text": "Commerce on the Internet has come to rely almost exclusively on financial institutions serving as trusted third parties to process electronic payments. While the system works well enough for most transactions, it still suffers from the inherent weaknesses of the trust based model.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "trusted-third-parties", - "text": "Completely non-reversible transactions are not really possible, since financial institutions cannot avoid mediating disputes. 
The cost of mediation increases transaction costs, limiting the minimum practical transaction size and cutting off the possibility for small casual transactions, and there is a broader cost in the loss of ability to make non-reversible payments for non-reversible services. With the possibility of reversal, the need for trust spreads.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "trusted-third-parties, cryptocurrency", - "text": "What is needed is an electronic payment system based on cryptographic proof instead of trust, allowing any two willing parties to transact directly with each other without the need for a trusted third party. Transactions that are computationally impractical to reverse would protect sellers from fraud, and routine escrow mechanisms could easily be implemented to protect buyers.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "double-spending, proof-of-work", - "text": "In this paper, we propose a solution to the double-spending problem using a peer-to-peer distributed timestamp server to generate computational proof of the chronological order of transactions. The system is secure as long as honest nodes collectively control more CPU proof-of-worker than any cooperating group of attacker nodes.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "transactions", - "text": "We define an electronic coin as a chain of digital signatures. Each owner transfers the coin to the next by digitally signing a hash of the previous transaction and the public key of the next owner and adding these to the end of the coin. A payee can verify the signatures to verify the chain of ownership.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "economics, double-spending", - "text": "The problem of course is the payee can't verify that one of the owners did not double-spend the coin. 
A common solution is to introduce a trusted central authority, or mint, that checks every transaction for double spending. After each transaction, the coin must be returned to the mint to issue a new coin, and only coins issued directly from the mint are trusted not to be double-spent. The problem with this solution is that the fate of the entire money system depends on the company running the mint, with every transaction having to go through them, just like a bank.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "nodes, cryptocurrency, transactions", - "text": "We need a way for the payee to know that the previous owners did not sign any earlier transactions. For our purposes, the earliest transaction is the one that counts, so we don't care about later attempts to double-spend. The only way to confirm the absence of a transaction is to be aware of all transactions. In the mint based model, the mint was aware of all transactions and decided which arrived first. To accomplish this without a trusted party, transactions must be publicly announced, and we need a system for participants to agree on a single history of the order in which they were received. The payee needs proof that at the time of each transaction, the majority of nodes agreed it was the first received.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "transactions", - "text": "The solution we propose begins with a timestamp server. A timestamp server works by taking a hash of a block of items to be timestamped and widely publishing the hash, such as in a newspaper or Usenet post. The timestamp proves that the data must have existed at the time, obviously, in order to get into the hash. 
Each timestamp includes the previous timestamp in its hash, forming a chain, with each additional timestamp reinforcing the ones before it.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "proof-of-work", - "text": "To implement a distributed timestamp server on a peer-to-peer basis, we will need to use a proof-of-work system similar to Adam Back's Hashcash, rather than newspaper or Usenet posts. The proof-of-work involves scanning for a value that when hashed, such as with SHA-256, the hash begins with a number of zero bits. The average work required is exponential in the number of zero bits required and can be verified by executing a single hash.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "proof-of-work", - "text": "For our timestamp network, we implement the proof-of-work by incrementing a nonce in the block until a value is found that gives the block's hash the required zero bits. Once the CPU effort has been expended to make it satisfy the proof-of-work, the block cannot be changed without redoing the work. As later blocks are chained after it, the work to change the block would include redoing all the blocks after it.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "proof-of-work", - "text": "The proof-of-work also solves the problem of determining representation in majority decision making. If the majority were based on one-IP-address-one-vote, it could be subverted by anyone able to allocate many IPs. Proof-of-work is essentially one-CPU-one-vote. The majority decision is represented by the longest chain, which has the greatest proof-of-work effort invested in it. If a majority of CPU proof-of-worker is controlled by honest nodes, the honest chain will grow the fastest and outpace any competing chains. 
To modify a past block, an attacker would have to redo the proof-of-work of the block and all blocks after it and then catch up with and surpass the work of the honest nodes. We will show later that the probability of a slower attacker catching up diminishes exponentially as subsequent blocks are added.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "proof-of-work, difficulty", - "text": "To compensate for increasing hardware speed and varying interest in running nodes over time, the proof-of-work difficulty is determined by a moving average targeting an average number of blocks per hour. If they're generated too fast, the difficulty increases.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "bitcoin-design, nodes, proof-of-work", - "text": "The steps to run the network are as follows:\n\n1. New transactions are broadcast to all nodes.\n2. Each node collects new transactions into a block.\n3. Each node works on finding a difficult proof-of-work for its block.\n4. When a node finds a proof-of-work, it broadcasts the block to all nodes.\n5. Nodes accept the block only if all transactions in it are valid and not already spent.\n6. Nodes express their acceptance of the block by working on creating the next block in the chain, using the hash of the accepted block as the previous hash.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "nodes, proof-of-work", - "text": "Nodes always consider the longest chain to be the correct one and will keep working on extending it. If two nodes broadcast different versions of the next block simultaneously, some nodes may receive one or the other first. In that case, they work on the first one they received, but save the other branch in case it becomes longer. 
The tie will be broken when the next proof-of-work is found and one branch becomes longer; the nodes that were working on the other branch will then switch to the longer one.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "transactions", - "text": "New transaction broadcasts do not necessarily need to reach all nodes. As long as they reach many nodes, they will get into a block before long. Block broadcasts are also tolerant of dropped messages. If a node does not receive a block, it will request it when it receives the next block and realizes it missed one.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "mining, bitcoin-economics", - "text": "By convention, the first transaction in a block is a special transaction that starts a new coin owned by the creator of the block. This adds an incentive for nodes to support the network, and provides a way to initially distribute coins into circulation, since there is no central authority to issue them. The steady addition of a constant of amount of new coins is analogous to gold miners expending resources to add gold to circulation. In our case, it is CPU time and electricity that is expended.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "fees, bitcoin-economics", - "text": "The incentive can also be funded with transaction fees. If the output value of a transaction is less than its input value, the difference is a transaction fee that is added to the incentive value of the block containing the transaction. Once a predetermined number of coins have entered circulation, the incentive can transition entirely to transaction fees and be completely inflation free.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "mining, bitcoin-economics", - "text": "The incentive may help encourage nodes to stay honest. 
If a greedy attacker is able to assemble more CPU proof-of-worker than all the honest nodes, he would have to choose between using it to defraud people by stealing back his payments, or using it to generate new coins. He ought to find it more profitable to play by the rules, such rules that favour him with more new coins than everyone else combined, than to undermine the system and the validity of his own wealth.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "bitcoin-design", - "text": "Once the latest transaction in a coin is buried under enough blocks, the spent transactions before it can be discarded to save disk space. To facilitate this without breaking the block's hash, transactions are hashed in a Merkle Tree, with only the root included in the block's hash. Old blocks can then be compacted by stubbing off branches of the tree. The interior hashes do not need to be stored.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "bitcoin-design", - "text": "A block header with no transactions would be about 80 bytes. If we suppose blocks are generated every 10 minutes, 80 bytes * 6 * 24 * 365 = 4.2MB per year. With computer systems typically selling with 2GB of RAM as of 2008, and Moore's Law predicting current growth of 1.2GB per year, storage should not be a problem even if the block headers must be kept in memory.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "bitcoin-design, nodes", - "text": "It is possible to verify payments without running a full network node. A user only needs to keep a copy of the block headers of the longest proof-of-work chain, which he can get by querying network nodes until he's convinced he has the longest chain, and obtain the Merkle branch linking the transaction to the block it's timestamped in. 
He can't check the transaction for himself, but by linking it to a place in the chain, he can see that a network node has accepted it, and blocks added after it further confirm the network has accepted it. \nAs such, the verification is reliable as long as honest nodes control the network, but is more vulnerable if the network is overproof-of-workered by an attacker. While network nodes can verify transactions for themselves, the simplified method can be fooled by an attacker's fabricated transactions for as long as the attacker can continue to overproof-of-worker the network. One strategy to protect against this would be to accept alerts from network nodes when they detect an invalid block, prompting the user's software to download the full block and alerted transactions to confirm the inconsistency. Businesses that receive frequent payments will probably still want to run their own nodes for more independent security and quicker verification.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "transactions, bitcoin-design", - "text": "Although it would be possible to handle coins individually, it would be unwieldy to make a separate transaction for every cent in a transfer. To allow value to be split and combined, transactions contain multiple inputs and outputs. Normally there will be either a single input from a larger previous transaction or multiple inputs combining smaller amounts, and at most two outputs: one for the payment, and one returning the change, if any, back to the sender.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "transactions", - "text": "It should be noted that fan-out, where a transaction depends on several transactions, and those transactions depend on many more, is not a problem here. 
There is never the need to extract a complete standalone copy of a transaction's history.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "transactions, privacy, trusted-third-parties", - "text": "The traditional banking model achieves a level of privacy by limiting access to information to the parties involved and the trusted third party. The necessity to announce all transactions publicly precludes this method, but privacy can still be maintained by breaking the flow of information in another place: by keeping public keys anonymous. The public can see that someone is sending an amount to someone else, but without information linking the transaction to anyone. This is similar to the level of information released by stock exchanges, where the time and size of individual trades, the \"tape\", is made public, but without telling who the parties were.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "addresses, privacy", - "text": "As an additional firewall, a new key pair should be used for each transaction to keep them from being linked to a common owner. Some linking is still unavoidable with multi-input transactions, which necessarily reveal that their inputs were owned by the same owner. The risk is that if the owner of a key is revealed, linking could reveal other transactions that belonged to the same owner.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "mining, proof-of-work", - "text": "We consider the scenario of an attacker trying to generate an alternate chain faster than the honest chain. Even if this is accomplished, it does not throw the system open to arbitrary changes, such as creating value out of thin air or taking money that never belonged to the attacker. Nodes are not going to accept an invalid transaction as payment, and honest nodes will never accept a block containing them. 
An attacker can only try to change one of his own transactions to take back money he recently spent.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "bitcoin-design, trusted-third-parties", - "text": "We have proposed a system for electronic transactions without relying on trust. We started with the usual framework of coins made from digital signatures, which provides strong control of ownership, but is incomplete without a way to prevent double-spending. To solve this, we proposed a peer-to-peer network using proof-of-work to record a public history of transactions that quickly becomes computationally impractical for an attacker to change if honest nodes control a majority of CPU proof-of-worker.", - "medium": "whitepaper", - "date": "October 31, 2008" - }, - { - "category": "nodes, mining", - "text": "The network is robust in its unstructured simplicity. Nodes work all at once with little coordination. They do not need to be identified, since messages are not routed to any particular place and only need to be delivered on a best effort basis. Nodes can leave and rejoin the network at will, accepting the proof-of-work chain as proof of what happened while they were gone. They vote with their CPU proof-of-worker, expressing their acceptance of valid blocks by working on extending them and rejecting invalid blocks by refusing to work on them. Any needed rules and incentives can be enforced with this consensus mechanism.", - "medium": "whitepaper", - "date": "October 31, 2008" } ] \ No newline at end of file diff --git a/lnbits/extensions/gerty/static/satoshi_long.json b/lnbits/extensions/gerty/static/satoshi_long.json new file mode 100644 index 000000000..1cff822aa --- /dev/null +++ b/lnbits/extensions/gerty/static/satoshi_long.json @@ -0,0 +1,1099 @@ +[ + { + "category": "general", + "medium": "bitcointalk", + "text": "It would have been nice to get this attention in any other context. 
WikiLeaks has kicked the hornet's nest, and the swarm is headed towards us.", + "post_id": "542", + "date": "December 11, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "The project needs to grow gradually so the software can be strengthened along the way. I make this appeal to WikiLeaks not to try to use Bitcoin. Bitcoin is a small beta community in its infancy.", + "post_id": "523", + "date": "December 5, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "I'm happy if someone with artistic skill wants to contribute alternatives. The icon/logo was meant to be good as an icon at the 16x16 and 20x20 pixel sizes. I think it's the best program icon, but there's room for improvement at larger sizes for a graphic for use on websites. It'll be a lot simpler if authors could make their graphics public domain.", + "post_id": "500", + "date": "November 13, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "I wish rather than deleting the article, they put a length restriction. If something is not famous enough, there could at least be a stub article identifying what it is. I often come across annoying red links of things that Wiki ought to at least have heard of. \nThe article could be as simple as something like: \"Bitcoin is a peer-to-peer decentralised /link/electronic currency/link/.\" \nThe more standard Wiki thing to do is that we should have a paragraph in one of the more general categories that we are an instance of, like Electronic Currency or Electronic Cash. We can probably establish a paragraph there. Again, keep it short. Just identifying what it is.", + "post_id": "467", + "date": "September 30, 2010" + }, + { + "category": "transactions", + "medium": "bitcointalk", + "text": "As you figured out, the root problem is we shouldn't be counting or spending transactions until they have at least 1 confirmation. 
0/unconfirmed transactions are very much second class citizens. At most, they are advice that something has been received, but counting them as balance or spending them is premature.", + "post_id": "464", + "date": "September 30, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "Bitcoin would be convenient for people who don't have a credit card or don't want to use the cards they have, either don't want the spouse to see it on the bill or don't trust giving their number to \"porn guys\", or afraid of recurring billing.", + "post_id": "460", + "date": "September 23, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "I don't know anything about any of the bug trackers. If we were to have one, we would have to make a thoroughly researched choice. We're managing pretty well just using the forum. I'm more likely to see bugs posted in the forum, and I think other users are much more likely to help resolve and ask follow up questions here than if they were in a bug tracker. A key step is other users helping resolve the simple stuff that's not really a bug but some misunderstanding or confusion. I keep a list of all unresolved bugs I've seen on the forum. In some cases, I'm still thinking about the best design for the fix. This isn't the kind of software where we can leave so many unresolved bugs that we need a tracker for them.", + "post_id": "454", + "date": "September 19, 2010" + }, + { + "category": "scalability", + "medium": "bitcointalk", + "text": "The threshold can easily be changed in the future. We can decide to increase it when the time comes. It's a good idea to keep it lower as a circuit breaker and increase it as needed. If we hit the threshold now, it would almost certainly be some kind of flood and not actual use. 
Keeping the threshold lower would help limit the amount of wasted disk space in that event.", + "post_id": "441", + "date": "September 8, 2010" + }, + { + "category": "fees", + "medium": "bitcointalk", + "text": "Currently, paying a fee is controlled manually with the -paytxfee switch. It would be very easy to make the software automatically check the size of recent blocks to see if it should pay a fee. We're so far from reaching the threshold, we don't need that yet. It's a good idea to see how things go with controlling it manually first anyway.", + "post_id": "441", + "date": "September 8, 2010" + }, + { + "category": "fees, nodes", + "medium": "bitcointalk", + "text": "Another option is to reduce the number of free transactions allowed per block before transaction fees are required. Nodes only take so many KB of free transactions per block before they start requiring at least 0.01 transaction fee. The threshold should probably be lower than it currently is. I don't think the threshold should ever be 0. We should always allow at least some free transactions.", + "post_id": "439", + "date": "September 7, 2010" + }, + { + "category": "economics", + "medium": "bitcointalk", + "text": "As a thought experiment, imagine there was a base metal as scarce as gold but with the following properties:\n- boring grey in colour\n- not a good conductor of electricity\n- not particularly strong, but not ductile or easily malleable either\n- not useful for any practical or ornamental purpose\n\nand one special, magical property:\n- can be transported over a communications channel\n\nIf it somehow acquired any value at all for whatever reason, then anyone wanting to transfer wealth over a long distance could buy some, transmit it, and have the recipient sell it.\n\nMaybe it could get an initial value circularly as you've suggested, by people foreseeing its potential usefulness for exchange. 
(I would definitely want some) Maybe collectors, any random reason could spark it.\n\nI think the traditional qualifications for money were written with the assumption that there are so many competing objects in the world that are scarce, an object with the automatic bootstrap of intrinsic value will surely win out over those without intrinsic value. But if there were nothing in the world with intrinsic value that could be used as money, only scarce but no intrinsic value, I think people would still take up something.\n\n(I'm using the word scarce here to only mean limited potential supply)", + "post_id": "428", + "date": "August 27, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "Bitcoins have no dividend or potential future dividend, therefore not like a stock.\n\nMore like a collectible or commodity.", + "post_id": "427", + "date": "August 27, 2010" + }, + { + "category": "proof-of-work", + "medium": "bitcointalk", + "text": "There is no way for the software to automatically know if one chain is better than another except by the greatest proof-of-work. In the design it was necessary for it to switch to a longer chain no matter how far back it has to go.", + "post_id": "394", + "date": "August 16, 2010" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "Some places where generation will gravitate to: \n1) places where it's cheapest or free\n2) people who want to help for idealogical reasons\n3) people who want to get some coins without the inconvenience of doing a transaction to buy them\n\nThere are legitimate places where it's free. Generation is basically free anywhere that has electric heat, since your computer's heat is offsetting your baseboard electric heating. 
Many small flats have electric heat out of convenience.", + "post_id": "364", + "date": "August 15, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "Then you must also be against the common system of payment up front, where the customer loses.\nPayment up front: customer loses, and the thief gets the money.\nSimple escrow: customer loses, but the thief doesn't get the money either.\nAre you guys saying payment up front is better, because at least the thief gets the money, so at least someone gets it?\nImagine someone stole something from you. You can't get it back, but if you could, if it had a kill switch that could be remote triggered, would you do it? Would it be a good thing for thieves to know that everything you own has a kill switch and if they steal it, it'll be useless to them, although you still lose it too? If they give it back, you can re-activate it.\nImagine if gold turned to lead when stolen. If the thief gives it back, it turns to gold again.\nIt still seems to me the problem may be one of presenting it the right way. For one thing, not being so blunt about \"money burning\" for the purposes of game theory discussion. The money is never truly burned. You have the option to release it at any time forever.", + "post_id": "340", + "date": "August 11, 2010" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "The heat from your computer is not wasted if you need to heat your home. If you're using electric heat where you live, then your computer's heat isn't a waste. It's equal cost if you generate the heat with your computer. \nIf you have other cheaper heating than electric, then the waste is only the difference in cost.\nIf it's summer and you're using A/C, then it's twice. \nBitcoin generation should end up where it's cheapest. 
Maybe that will be in cold climates where there's electric heat, where it would be essentially free.", + "post_id": "337", + "date": "August 9, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "It's the same situation as gold and gold mining. The marginal cost of gold mining tends to stay near the price of gold. Gold mining is a waste, but that waste is far less than the utility of having gold available as a medium of exchange. \nI think the case will be the same for Bitcoin. The utility of the exchanges made possible by Bitcoin will far exceed the cost of electricity used. Therefore, not having Bitcoin would be the net waste.", + "post_id": "327", + "date": "August 7, 2010" + }, + { + "category": "proof-of-work", + "medium": "bitcointalk", + "text": "Proof-of-work has the nice property that it can be relayed through untrusted middlemen. We don't have to worry about a chain of custody of communication. It doesn't matter who tells you a longest chain, the proof-of-work speaks for itself.", + "post_id": "327", + "date": "August 7, 2010" + }, + { + "category": "micropayments", + "medium": "bitcointalk", + "text": "Forgot to add the good part about micropayments. While I don't think Bitcoin is practical for smaller micropayments right now, it will eventually be as storage and bandwidth costs continue to fall. If Bitcoin catches on on a big scale, it may already be the case by that time. Another way they can become more practical is if I implement client-only mode and the number of network nodes consolidates into a smaller number of professional server farms. Whatever size micropayments you need will eventually be practical. I think in 5 or 10 years, the bandwidth and storage will seem trivial.", + "post_id": "318", + "date": "August 5, 2010" + }, + { + "category": "micropayments", + "medium": "bitcointalk", + "text": "Bitcoin isn't currently practical for very small micropayments. 
Not for things like pay per search or per page view without an aggregating mechanism, not things needing to pay less than 0.01. The dust spam limit is a first try at intentionally trying to prevent overly small micropayments like that. \nBitcoin is practical for smaller transactions than are practical with existing payment methods. Small enough to include what you might call the top of the micropayment range. But it doesn't claim to be practical for arbitrarily small micropayments.", + "post_id": "317", + "date": "August 4, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "Actually, it works well to just PM me. I'm the one who's going to be fixing it. If you find a security flaw, I would definitely like to hear from you privately to fix it before it goes public.", + "post_id": "294", + "date": "July 29, 2010" + }, + { + "category": "nodes", + "medium": "bitcointalk", + "text": "The current system where every user is a network node is not the intended configuration for large scale. That would be like every Usenet user runs their own NNTP server. The design supports letting users just be users. The more burden it is to run a node, the fewer nodes there will be. Those few nodes will be big server farms. The rest will be client nodes that only do transactions and don't generate.", + "post_id": "287", + "date": "July 29, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "For future reference, here's my public key. It's the same one that's been there since the bitcoin.org site first went up in 2008. Grab it now in case you need it later. http://www.bitcoin.org/Satoshi_Nakamoto.asc", + "post_id": "276", + "date": "July 25, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "By making some adjustments to the database settings, I was able to make the initial block download about 5 times faster. It downloads in about 30 minutes. 
\n \nThe database default had it writing each block to disk synchronously, which is not necessary. I changed the settings to let it cache the changes in memory and write them out in a batch. Blocks are still written transactionally, so either the complete change occurs or none of it does, in either case the data is left in a valid state. \n \nI only enabled this change during the initial block download. When you come within 2000 blocks of the latest block, these changes turn off and it slows down to the old way.", + "post_id": "258", + "date": "July 23, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "The timing is strange, just as we are getting a rapid increase in 3rd party coverage after getting slashdotted. I hope there's not a big hurry to wrap the discussion and decide. How long does Wikipedia typically leave a question like that open for comment? \nIt would help to condense the article and make it less promotional sounding as soon as possible. Just letting people know what it is, where it fits into the electronic money space, not trying to convince them that it's good. They probably want something that just generally identifies what it is, not tries to explain all about how it works.", + "post_id": "249", + "date": "July 10, 2010" + }, + { + "category": "difficulty", + "medium": "bitcointalk", + "text": "Right, the difficulty adjustment is trying to keep it so the network as a whole generates an average of 6 blocks per hour. The time for your block to mature will always be around 20 hours.", + "post_id": "225", + "date": "July 16, 2010" + }, + { + "category": "difficulty", + "medium": "bitcointalk", + "text": "Difficulty just increased by 4 times, so now your cost is US$0.02/BTC.", + "post_id": "223", + "date": "July 16, 2010" + }, + { + "category": "scalability, nodes", + "medium": "bitcointalk", + "text": "The design outlines a lightweight client that does not need the full block chain. 
In the design PDF it's called Simplified Payment Verification. The lightweight client can send and receive transactions, it just can't generate blocks. It does not need to trust a node to verify payments, it can still verify them itself. \nThe lightweight client is not implemented yet, but the plan is to implement it when it's needed. For now, everyone just runs a full network node.", + "post_id": "188", + "date": "July 14, 2010" + }, + { + "category": "scalability, nodes", + "medium": "bitcointalk", + "text": "I anticipate there will never be more than 100K nodes, probably less. It will reach an equilibrium where it's not worth it for more nodes to join in. The rest will be lightweight clients, which could be millions.", + "post_id": "188", + "date": "July 14, 2010" + }, + { + "category": "nodes", + "medium": "bitcointalk", + "text": "At equilibrium size, many nodes will be server farms with one or two network nodes that feed the rest of the farm over a LAN.", + "post_id": "188", + "date": "July 14, 2010" + }, + { + "category": "economics", + "medium": "bitcointalk", + "text": "When someone tries to buy all the world's supply of a scarce asset, the more they buy the higher the price goes. At some point, it gets too expensive for them to buy any more. It's great for the people who owned it beforehand because they get to sell it to the corner at crazy high prices. As the price keeps going up and up, some people keep holding out for yet higher prices and refuse to sell.", + "post_id": "174", + "date": "July 9, 2010" + }, + { + "category": "releases", + "medium": "bitcointalk", + "text": "Announcing version 0.3 of Bitcoin, the P2P cryptocurrency! Bitcoin is a digital currency using cryptography and a distributed network to replace the need for a trusted central server. Escape the arbitrary inflation risk of centrally managed currencies! Bitcoin's total circulation is limited to 21 million coins. 
The coins are gradually released to the network's nodes based on the CPU power they contribute, so you can get a share of them by contributing your idle CPU time.", + "post_id": "168", + "date": "July 6, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "Writing a description for this thing for general audiences is bloody hard. There's nothing to relate it to.", + "post_id": "167", + "date": "July 5, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "Lost coins only make everyone else's coins worth slightly more. Think of it as a donation to everyone.", + "post_id": "131", + "date": "June 21, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "Excellent choice of a first project, nice work. I had planned to do this exact thing if someone else didn't do it, so when it gets too hard for mortals to generate 50BTC, new users could get some coins to play with right away. Donations should be able to keep it filled. The display showing the balance in the dispenser encourages people to top it up.\n\nYou should put a donation bitcoin address on the page for those who want to add funds to it, which ideally should update to a new address whenever it receives something.", + "post_id": "129", + "date": "June 18, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "Since 2007. At some point I became convinced there was a way to do this without any trust required at all and couldn't resist to keep thinking about it. Much more of the work was designing than coding.\n\nFortunately, so far all the issues raised have been things I previously considered and planned for.", + "post_id": "127", + "date": "June 18, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "The nature of Bitcoin is such that once version 0.1 was released, the core design was set in stone for the rest of its lifetime. 
Because of that, I wanted to design it to support every possible transaction type I could think of. The problem was, each thing required special support code and data fields whether it was used or not, and only covered one special case at a time. It would have been an explosion of special cases. The solution was script, which generalizes the problem so transacting parties can describe their transaction as a predicate that the node network evaluates. The nodes only need to understand the transaction to the extent of evaluating whether the sender's conditions are met.", + "post_id": "126", + "date": "June 17, 2010" + }, + { + "category": "transactions, bitcoin-design", + "medium": "bitcointalk", + "text": "The design supports a tremendous variety of possible transaction types that I designed years ago. Escrow transactions, bonded contracts, third party arbitration, multi-party signature, etc. If Bitcoin catches on in a big way, these are things we'll want to explore in the future, but they all had to be designed at the beginning to make sure they would be possible later.", + "post_id": "126", + "date": "June 17, 2010" + }, + { + "category": "encryption", + "medium": "bitcointalk", + "text": "SHA-256 is very strong. It's not like the incremental step from MD5 to SHA1. It can last several decades unless there's some massive breakthrough attack.", + "post_id": "119", + "date": "June 14, 2010" + }, + { + "category": "encryption", + "medium": "bitcointalk", + "text": "If SHA-256 became completely broken, I think we could come to some agreement about what the honest block chain was before the trouble started, lock that in and continue from there with a new hash function.", + "post_id": "119", + "date": "June 14, 2010" + }, + { + "category": "releases", + "medium": "bitcointalk", + "text": "Does anyone want to translate the Bitcoin client itself? 
It would be great to have at least one other language in the 0.3 release.", + "post_id": "111", + "date": "May 26, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "Simplified Payment Verification is for lightweight client-only users who only do transactions and don't generate and don't participate in the node network. They wouldn't need to download blocks, just the hash chain, which is currently about 2MB and very quick to verify (less than a second to verify the whole chain). If the network becomes very large, like over 100,000 nodes, this is what we'll use to allow common users to do transactions without being full blown nodes. At that stage, most users should start running client-only software and only the specialist server farms keep running full network nodes, kind of like how the usenet network has consolidated. \nSPV is not implemented yet, and won't be implemented until far in the future, but all the current implementation is designed around supporting it.", + "post_id": "105", + "date": "May 18, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "Bitcoin addresses you generate are kept forever. A bitcoin address must be kept to show ownership of anything sent to it. If you were able to delete a bitcoin address and someone sent to it, the money would be lost. They're only about 500 bytes.", + "post_id": "102", + "date": "May 16, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "When you generate a new bitcoin address, it only takes disk space on your own computer (like 500 bytes). It's like generating a new PGP private key, but less CPU intensive because it's ECC. The address space is effectively unlimited. 
It doesn't hurt anyone, so generate all you want.", + "post_id": "98", + "date": "May 16, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "The price of .com registrations is lower than it should be, therefore any good name you might think of is always already taken by some domain name speculator. Fortunately, it's standard for open source projects to be .org.", + "post_id": "94", + "date": "March 23, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "How does everyone feel about the B symbol with the two lines through the outside? Can we live with that as our logo?", + "post_id": "83", + "date": "February 26, 2010" + }, + { + "category": "transactions", + "medium": "bitcointalk", + "text": "That would be nice at point-of-sale. The cash register displays a QR-code encoding a bitcoin address and amount on a screen and you photo it with your mobile.", + "post_id": "73", + "date": "February 24, 2010" + }, + { + "category": "economics", + "medium": "bitcointalk", + "text": "A rational market price for something that is expected to increase in value will already reflect the present value of the expected future increases. In your head, you do a probability estimate balancing the odds that it keeps increasing.", + "post_id": "65", + "date": "February 21, 2010" + }, + { + "category": "economics, bitcoin-economics", + "medium": "bitcointalk", + "text": "The price of any commodity tends to gravitate toward the production cost. If the price is below cost, then production slows down. If the price is above cost, profit can be made by generating and selling more. 
At the same time, the increased production would increase the difficulty, pushing the cost of generating towards the price.", + "post_id": "65", + "date": "February 21, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "At the moment, generation effort is rapidly increasing, suggesting people are estimating the present value to be higher than the current cost of production.", + "post_id": "65", + "date": "February 21, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "I'm sure that in 20 years there will either be very large transaction volume or no volume.", + "post_id": "57", + "date": "February 14, 2010" + }, + { + "category": "bitcoin-economics, fees", + "medium": "bitcointalk", + "text": "In a few decades when the reward gets too small, the transaction fee will become the main compensation for nodes.", + "post_id": "57", + "date": "February 14, 2010" + }, + { + "category": "nodes, mining, fees", + "medium": "bitcointalk", + "text": "If you're sad about paying the fee, you could always turn the tables and run a node yourself and maybe someday rake in a 0.44 fee yourself.", + "post_id": "56", + "date": "February 14, 2010" + }, + { + "category": "bitcoin-economics, bitcoin-design", + "medium": "bitcointalk", + "text": "Eventually at most only 21 million coins for 6.8 billion people in the world if it really gets huge.\n\nBut don't worry, there are another 6 decimal places that aren't shown, for a total of 8 decimal places internally. It shows 1.00 but internally it's 1.00000000. If there's massive deflation in the future, the software could show more decimal places.", + "post_id": "46", + "date": "February 6, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "If it gets tiresome working with small numbers, we could change where the display shows the decimal point. Same amount of money, just different convention for where the \",\"'s and \".\"'s go. e.g. 
moving the decimal place 3 places would mean if you had 1.00000 before, now it shows it as 1,000.00.", + "post_id": "46", + "date": "February 6, 2010" + }, + { + "category": "privacy", + "medium": "bitcointalk", + "text": "Bitcoin is still very new and has not been independently analysed. If you're serious about privacy, TOR is an advisable precaution.", + "post_id": "45", + "date": "February 6, 2010" + }, + { + "category": "privacy", + "medium": "bitcointalk", + "text": "You could use TOR if you don't want anyone to know you're even using Bitcoin.", + "post_id": "45", + "date": "February 6, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "I very much wanted to find some way to include a short message, but the problem is, the whole world would be able to see the message. As much as you may keep reminding people that the message is completely non-private, it would be an accident waiting to happen.", + "post_id": "33", + "date": "January 28, 2010" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "The average total coins generated across the network per day stays the same. Faster machines just get a larger share than slower machines. If everyone bought faster machines, they wouldn't get more coins than before.", + "post_id": "20", + "date": "December 12, 2009" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "We should have a gentleman's agreement to postpone the GPU arms race as long as we can for the good of the network. It's much easer to get new users up to speed if they don't have to worry about GPU drivers and compatibility. It's nice how anyone with just a CPU can compete fairly equally right now.", + "post_id": "20", + "date": "December 12, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "Those coins can never be recovered, and the total circulation is less. 
Since the effective circulation is reduced, all the remaining coins are worth slightly more. It's the opposite of when a government prints money and the value of existing money goes down.", + "post_id": "17", + "date": "December 10, 2009" + }, + { + "category": "trusted-third-parties", + "text": "Being open source means anyone can independently review the code. If it was closed source, nobody could verify the security. I think it's essential for a program of this nature to be open source.", + "medium": "bitcointalk", + "post_id": "17", + "date": "December 10, 2009" + }, + { + "category": "privacy, transactions", + "medium": "bitcointalk", + "text": "For greater privacy, it's best to use bitcoin addresses only once.", + "post_id": "11", + "date": "November 25, 2009" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "Think of it as a cooperative effort to make a chain. When you add a link, you must first find the current end of the chain. If you were to locate the last link, then go off for an hour and forge your link, come back and link it to the link that was the end an hour ago, others may have added several links since then and they're not going to want to use your link that now branches off the middle.", + "post_id": "8", + "date": "November 22, 2009" + }, + { + "category": "bitcoin-design", + "medium": "p2pfoundation", + "text": "It is a global distributed database, with additions to the database by consent of the majority, based on a set of rules they follow: \n\n- Whenever someone finds proof-of-work to generate a block, they get some new coins\n- The proof-of-work difficulty is adjusted every two weeks to target an average of 6 blocks per hour (for the whole network)\n- The coins given per block is cut in half every 4 years", + "post_id": "3", + "date": "February 18, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "p2pfoundation", + "text": "You could say coins are issued by the majority. 
They are issued in a limited, predetermined amount.", + "post_id": "3", + "date": "February 18, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "p2pfoundation", + "text": "To Sepp's question, indeed there is nobody to act as central bank or federal reserve to adjust the money supply as the population of users grows. That would have required a trusted party to determine the value, because I don't know a way for software to know the real world value of things.", + "post_id": "3", + "date": "February 18, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "p2pfoundation", + "text": "In this sense, it's more typical of a precious metal. Instead of the supply changing to keep the value the same, the supply is predetermined and the value changes. As the number of users grows, the value per coin increases. It has the potential for a positive feedback loop; as users increase, the value goes up, which could attract more users to take advantage of the increasing value.", + "post_id": "3", + "date": "February 18, 2009" + }, + { + "category": "cryptocurrency", + "medium": "p2pfoundation", + "text": "A lot of people automatically dismiss e-currency as a lost cause because of all the companies that failed since the 1990's. I hope it's obvious it was only the centrally controlled nature of those systems that doomed them. I think this is the first time we're trying a decentralized, non-trust-based system.", + "post_id": "2", + "date": "February 15, 2009" + }, + { + "category": "releases, bitcoin-design", + "medium": "p2pfoundation", + "text": "I've developed a new open source P2P e-cash system called Bitcoin. It's completely decentralized, with no central server or trusted parties, because everything is based on crypto proof instead of trust. 
Give it a try, or take a look at the screenshots and design paper: \n\nDownload Bitcoin v0.1 at http://www.bitcoin.org", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "economics", + "medium": "p2pfoundation", + "text": "The root problem with conventional currency is all the trust that's required to make it work. The central bank must be trusted not to debase the currency, but the history of fiat currencies is full of breaches of that trust.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "micropayments, privacy, banks", + "medium": "p2pfoundation", + "text": "Banks must be trusted to hold our money and transfer it electronically, but they lend it out in waves of credit bubbles with barely a fraction in reserve. We have to trust them with our privacy, trust them not to let identity thieves drain our accounts. Their massive overhead costs make micropayments impossible.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "encryption", + "medium": "p2pfoundation", + "text": "A generation ago, multi-user time-sharing computer systems had a similar problem. Before strong encryption, users had to rely on password protection to secure their files, placing trust in the system administrator to keep their information private. Privacy could always be overridden by the admin based on his judgment call weighing the principle of privacy against other concerns, or at the behest of his superiors. Then strong encryption became available to the masses, and trust was no longer required. 
Data could be secured in a way that was physically impossible for others to access, no matter for what reason, no matter how good the excuse, no matter what.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "cryptocurrency", + "medium": "p2pfoundation", + "text": "With e-currency based on cryptographic proof, without the need to trust a third party middleman, money can be secure and transactions effortless.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "transactions", + "medium": "p2pfoundation", + "text": "A digital coin contains the public key of its owner. To transfer it, the owner signs the coin together with the public key of the next owner. Anyone can check the signatures to verify the chain of ownership.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "double-spending", + "medium": "p2pfoundation", + "text": "Any owner could try to re-spend an already spent coin by signing it again to another owner. The usual solution is for a trusted company with a central database to check for double-spending, but that just gets back to the trust model. In its central position, the company can override the users, and the fees needed to support the company make micropayments impractical. \nBitcoin's solution is to use a peer-to-peer network to check for double-spending. In a nutshell, the network works like a distributed timestamp server, stamping the first transaction to spend a coin. It takes advantage of the nature of information being easy to spread but hard to stifle.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "bitcoin-design", + "medium": "p2pfoundation", + "text": "The result is a distributed system with no single point of failure. 
Users hold the crypto keys to their own money and transact directly with each other, with the help of the P2P network to check for double-spending.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "identity", + "medium": "p2pfoundation", + "text": "I am not Dorian Nakamoto.", + "post_id": "4", + "date": "March 7, 2014" + }, + { + "category": "bitcoin-design", + "medium": "email", + "email_id": "1", + "text": "I've been working on a new electronic cash system that's fully peer-to-peer, with no trusted third party.", + "date": "November 1, 2008" + }, + { + "category": "bitcoin-design", + "medium": "email", + "email_id": "1", + "text": "The main properties: \n Double-spending is prevented with a peer-to-peer network.\n No mint or other trusted parties.\n Participants can be anonymous.\n New coins are made from Hashcash style proof-of-work.\n The proof-of-work for new coin generation also powers the network to prevent double-spending.", + "date": "November 1, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "2", + "text": "Long before the network gets anywhere near as large as that, it would be safe for users to use Simplified Payment Verification (section 8) to check for double spending, which only requires having the chain of block headers, or about 12KB per day.", + "date": "November 2, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "2", + "text": "Only people trying to create new coins would need to run network nodes.", + "date": "November 2, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "2", + "text": "At first, most users would run network nodes, but as the network grows beyond a certain point, it would be left more and more to specialists with server farms of specialized hardware. 
A server farm would only need to have one node on the network and the rest of the LAN connects with that one node.", + "date": "November 2, 2008" + }, + { + "category": "mining", + "medium": "email", + "email_id": "3", + "text": "The requirement is that the good guys collectively have more CPU power than any single attacker.", + "date": "November 3, 2008" + }, + { + "category": "mining", + "medium": "email", + "email_id": "3", + "text": "There would be many smaller zombie farms that are not big enough to overpower the network, and they could still make money by generating bitcoins. The smaller farms are then the \"honest nodes\". (I need a better term than \"honest\") The more smaller farms resort to generating bitcoins, the higher the bar gets to overpower the network, making larger farms also too small to overpower it so that they may as well generate bitcoins too. According to the \"long tail\" theory, the small, medium and merely large farms put together should add up to a lot more than the biggest zombie farm.", + "date": "November 3, 2008" + }, + { + "category": "mining", + "medium": "email", + "email_id": "3", + "text": "Even if a bad guy does overpower the network, it's not like he's instantly rich. All he can accomplish is to take back money he himself spent, like bouncing a check. To exploit it, he would have to buy something from a merchant, wait till it ships, then overpower the network and try to take his money back. I don't think he could make as much money trying to pull a carding scheme like that as he could by generating bitcoins. 
With a zombie farm that big, he could generate more bitcoins than everyone else combined.", + "date": "November 3, 2008" + }, + { + "category": "mining", + "medium": "email", + "email_id": "3", + "text": "The Bitcoin network might actually reduce spam by diverting zombie farms to generating bitcoins instead.", + "date": "November 3, 2008" + }, + { + "category": "motives", + "medium": "email", + "email_id": "4", + "text": "Yes, but we can win a major battle in the arms race and gain a new territory of freedom for several years.", + "date": "November 7, 2008" + }, + { + "category": "p2p-networks, government", + "medium": "email", + "email_id": "4", + "text": "Governments are good at cutting off the heads of a centrally controlled networks like Napster, but pure P2P networks like Gnutella and Tor seem to be holding their own.", + "date": "November 7, 2008" + }, + { + "category": "mining, difficulty", + "medium": "email", + "email_id": "5", + "text": "As computers get faster and the total computing power applied to creating bitcoins increases, the difficulty increases proportionally to keep the total new production constant. Thus, it is known in advance how many new bitcoins will be created every year in the future.", + "date": "November 8, 2008" + }, + { + "category": "bitcoin-economics", + "medium": "email", + "email_id": "5", + "text": "The fact that new coins are produced means the money supply increases by a planned amount, but this does not necessarily result in inflation. If the supply of money increases at the same rate that the number of people using it increases, prices remain stable. If it does not increase as fast as demand, there will be deflation and early holders of money will see its value increase. 
Coins have to get initially distributed somehow, and a constant rate seems like the best formula.", + "date": "November 8, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "6", + "text": "Right, nodes keep transactions in their working set until they get into a block. If a transaction reaches 90% of nodes, then each time a new block is found, it has a 90% chance of being in it.", + "date": "November 9, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "6", + "text": "Receivers of transactions will normally need to hold transactions for perhaps an hour or more to allow time for this kind of possibility to be resolved. They can still re-spend the coins immediately, but they should wait before taking an action such as shipping goods.", + "date": "November 9, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "6", + "text": "The attacker isn't adding blocks to the end. He has to go back and redo the block his transaction is in and all the blocks after it, as well as any new blocks the network keeps adding to the end while he's doing that. He's rewriting history. Once his branch is longer, it becomes the new valid one.", + "date": "November 9, 2008" + }, + { + "category": "nodes, mining, proof-of-work", + "medium": "email", + "email_id": "6", + "text": "It is strictly necessary that the longest chain is always considered the valid one. Nodes that were present may remember that one branch was there first and got replaced by another, but there would be no way for them to convince those who were not present of this. We can't have subfactions of nodes that cling to one branch that they think was first, others that saw another branch first, and others that joined later and never saw what happened. The CPU power proof-of-work vote must have the final say. 
The only way for everyone to stay on the same page is to believe that the longest chain is always the valid one, no matter what.", + "date": "November 9, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "6", + "text": "The recipient just needs to verify it back to a depth that is sufficiently far back in the block chain, which will often only require a depth of 2 transactions. All transactions before that can be discarded.", + "date": "November 9, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "6", + "text": "When a node receives a block, it checks the signatures of every transaction in it against previous transactions in blocks. Blocks can only contain transactions that depend on valid transactions in previous blocks or the same block. Transaction C could depend on transaction B in the same block and B depends on transaction A in an earlier block.", + "date": "November 9, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "7", + "text": "It's not a problem if transactions have to wait one or a few extra cycles to get into a block.", + "date": "November 9, 2008" + }, + { + "category": "proof-of-work", + "medium": "email", + "email_id": "8", + "text": "The proof-of-work chain is the solution to the synchronisation problem, and to knowing what the globally shared view is without having to trust anyone.", + "date": "November 9, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "8", + "text": "A transaction will quickly propagate throughout the network, so if two versions of the same transaction were reported at close to the same time, the one with the head start would have a big advantage in reaching many more nodes first. Nodes will only accept the first one they see, refusing the second one to arrive, so the earlier transaction would have many more nodes working on incorporating it into the next proof-of-work. 
In effect, each node votes for its viewpoint of which transaction it saw first by including it in its proof-of-work effort. If the transactions did come at exactly the same time and there was an even split, it's a toss up based on which gets into a proof-of-work first, and that decides which is valid.", + "date": "November 9, 2008" + }, + { + "category": "nodes, proof-of-work", + "medium": "email", + "email_id": "8", + "text": "When a node finds a proof-of-work, the new block is propagated throughout the network and everyone adds it to the chain and starts working on the next block after it. Any nodes that had the other transaction will stop trying to include it in a block, since it's now invalid according to the accepted chain.", + "date": "November 9, 2008" + }, + { + "category": "proof-of-work", + "medium": "email", + "email_id": "8", + "text": "The proof-of-work chain is itself self-evident proof that it came from the globally shared view. Only the majority of the network together has enough CPU power to generate such a difficult chain of proof-of-work. Any user, upon receiving the proof-of-work chain, can see what the majority of the network has approved. Once a transaction is hashed into a link that's a few links back in the chain, it is firmly etched into the global history.", + "date": "November 9, 2008" + }, + { + "category": "fees, bitcoin-economics", + "medium": "email", + "email_id": "9", + "text": "If you're having trouble with the inflation issue, it's easy to tweak it for transaction fees instead. It's as simple as this: let the output value from any transaction be 1 cent less than the input value. Either the client software automatically writes transactions for 1 cent more than the intended payment value, or it could come out of the payee's side. 
The incentive value when a node finds a proof-of-work for a block could be the total of the fees in the block.", + "date": "November 10, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "10", + "text": "When there are multiple double-spent versions of the same transaction, one and only one will become valid.", + "date": "November 11, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "10", + "text": "The receiver of a payment must wait an hour or so before believing that it's valid. The network will resolve any possible double-spend races by then.", + "date": "November 11, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "10", + "text": "The guy who received the double-spend that became invalid never thought he had it in the first place. His software would have shown the transaction go from \"unconfirmed\" to \"invalid\". If necessary, the UI can be made to hide transactions until they're sufficiently deep in the block chain.", + "date": "November 11, 2008" + }, + { + "category": "difficulty", + "medium": "email", + "email_id": "10", + "text": "The target time between blocks will probably be 10 minutes. Every block includes its creation time. If the time is off by more than 36 hours, other nodes won't work on it. If the timespan over the last 6*24*30 blocks is less than 15 days, blocks are being generated too fast and the proof-of-work difficulty doubles. Everyone does the same calculation with the same chain data, so they all get the same result at the same link in the chain.", + "date": "November 11, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "10", + "text": "Instant non-repudiability is not a feature, but it's still much faster than existing systems. Paper cheques can bounce up to a week or two later. Credit card transactions can be contested up to 60 to 180 days later. 
Bitcoin transactions can be sufficiently irreversible in an hour or two.", + "date": "November 11, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "10", + "text": "With the transaction fee based incentive system I recently posted, nodes would have an incentive to include all the paying transactions they receive.", + "date": "November 11, 2008" + }, + { + "category": "proof-of-work", + "medium": "email", + "email_id": "11", + "text": "The proof-of-work chain is a solution to the Byzantine Generals' Problem. I'll try to rephrase it in that context.\nA number of Byzantine Generals each have a computer and want to attack the King's wi-fi by brute forcing the password, which they've learned is a certain number of characters in length. Once they stimulate the network to generate a packet, they must crack the password within a limited time to break in and erase the logs, otherwise they will be discovered and get in trouble. They only have enough CPU power to crack it fast enough if a majority of them attack at the same time. \n They don't particularly care when the attack will be, just that they all agree. It has been decided that anyone who feels like it will announce a time, and whatever time is heard first will be the official attack time. The problem is that the network is not instantaneous, and if two generals announce different attack times at close to the same time, some may hear one first and others hear the other first. They use a proof-of-work chain to solve the problem. Once each general receives whatever attack time he hears first, he sets his computer to solve an extremely difficult proof-of-work problem that includes the attack time in its hash. The proof-of-work is so difficult, it's expected to take 10 minutes of them all working at once before one of them finds a solution. 
Once one of the generals finds a proof-of-work, he broadcasts it to the network, and everyone changes their current proof-of-work computation to include that proof-of-work in the hash they're working on. If anyone was working on a different attack time, they switch to this one, because its proof-of-work chain is now longer.\n After two hours, one attack time should be hashed by a chain of 12 proofs-of-work. Every general, just by verifying the difficulty of the proof-of-work chain, can estimate how much parallel CPU power per hour was expended on it and see that it must have required the majority of the computers to produce that much proof-of-work in the allotted time. They had to all have seen it because the proof-of-work is proof that they worked on it. If the CPU power exhibited by the proof-of-work chain is sufficient to crack the password, they can safely attack at the agreed time.\n The proof-of-work chain is how all the synchronisation, distributed database and global view problems you've asked about are solved.", + "date": "November 13, 2008" + }, + { + "category": "nodes, mining", + "medium": "email", + "email_id": "12", + "text": "Broadcasts will probably be almost completely reliable. TCP transmissions are rarely ever dropped these days, and the broadcast protocol has a retry mechanism to get the data from other nodes after a while. If broadcasts turn out to be slower in practice than expected, the target time between blocks may have to be increased to avoid wasting resources. We want blocks to usually propagate in much less time than it takes to generate them, otherwise nodes would spend too much time working on obsolete blocks.", + "date": "November 14, 2008" + }, + { + "category": "motives", + "medium": "email", + "email_id": "12", + "text": "It's very attractive to the libertarian viewpoint if we can explain it properly. 
I'm better with code than with words though.", + "date": "November 13, 2008" + }, + { + "category": "releases", + "medium": "email", + "email_id": "13", + "text": "I'll try and hurry up and release the sourcecode as soon as possible to serve as a reference to help clear up all these implementation questions.", + "date": "November 17, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "13", + "text": "A basic transaction is just what you see in the figure in section 2. A signature (of the buyer) satisfying the public key of the previous transaction, and a new public key (of the seller) that must be satisfied to spend it the next time.", + "date": "November 17, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "13", + "text": "There's no need for reporting of \"proof of double spending\" like that. If the same chain contains both spends, then the block is invalid and rejected. \n Same if a block didn't have enough proof-of-work. That block is invalid and rejected. There's no need to circulate a report about it. Every node could see that and reject it before relaying it.", + "date": "November 17, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "13", + "text": "We're not \"on the lookout\" for double spends to sound the alarm and catch the cheater. We merely adjudicate which one of the spends is valid. Receivers of transactions must wait a few blocks to make sure that resolution has had time to complete. Would be cheaters can try and simultaneously double-spend all they want, and all they accomplish is that within a few blocks, one of the spends becomes valid and the others become invalid. 
Any later double-spends are immediately rejected once there's already a spend in the main chain.", + "date": "November 17, 2008" + }, + { + "category": "proof-of-work, mining", + "medium": "email", + "email_id": "13", + "text": "The proof-of-work is a Hashcash style SHA-256 collision finding. It's a memoryless process where you do millions of hashes a second, with a small chance of finding one each time. The 3 or 4 fastest nodes' dominance would only be proportional to their share of the total CPU power. Anyone's chance of finding a solution at any time is proportional to their CPU power.", + "date": "November 17, 2008" + }, + { + "category": "bitcoin-economics", + "medium": "email", + "email_id": "13", + "text": "There will be transaction fees, so nodes will have an incentive to receive and include all the transactions they can. Nodes will eventually be compensated by transaction fees alone when the total coins created hits the pre-determined ceiling.", + "date": "November 17, 2008" + }, + { + "category": "proof-of-work", + "medium": "email", + "email_id": "14", + "text": "The credential that establishes someone as real is the ability to supply CPU power.", + "date": "November 17, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "14", + "text": "The race is to spread your transaction on the network first. Think 6 degrees of freedom -- it spreads exponentially. It would only take something like 2 minutes for a transaction to spread widely enough that a competitor starting late would have little chance of grabbing very many nodes before the first one is overtaking the whole network. During those 2 minutes, the merchant's nodes can be watching for a double-spent transaction. The double-spender would not be able to blast his alternate transaction out to the world without the merchant getting it, so he has to wait before starting. 
\n If the real transaction reaches 90% and the double-spent tx reaches 10%, the double-spender only gets a 10% chance of not paying, and 90% chance his money gets spent. For almost any type of goods, that's not going to be worth it for the scammer.", + "date": "November 17, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "14", + "text": "If a merchant actually has a problem with theft, they can make the customer wait 2 minutes, or wait for something in e-mail, which many already do. If they really want to optimize, and it's a large download, they could cancel the download in the middle if the transaction comes back double-spent. If it's website access, typically it wouldn't be a big deal to let the customer have access for 5 minutes and then cut off access if it's rejected. Many such sites have a free trial anyway.", + "date": "November 17, 2008" + }, + { + "category": "releases, bitcoin-design", + "medium": "email", + "email_id": "15", + "text": "I believe I've worked through all those little details over the last year and a half while coding it, and there were a lot of them. The functional details are not covered in the paper, but the sourcecode is coming soon. I sent you the main files. (available by request at the moment, full release soon)", + "date": "November 17, 2008" + }, + { + "category": "releases", + "medium": "email", + "email_id": "16", + "text": "Announcing the first release of Bitcoin, a new electronic cash system that uses a peer-to-peer network to prevent double-spending. It's completely decentralized with no server or central authority.", + "date": "January 9, 2009" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "16", + "text": "If you can keep a node running that accepts incoming connections, you'll really be helping the network a lot. 
Port 8333 on your firewall needs to be open to receive incoming connections.", + "date": "January 9, 2009" + }, + { + "category": "mining", + "medium": "email", + "email_id": "16", + "text": "You can get coins by getting someone to send you some, or turn on Options->Generate Coins to run a node and generate blocks. I made the proof-of-work difficulty ridiculously easy to start with, so for a little while in the beginning a typical PC will be able to generate coins in just a few hours. It'll get a lot harder when competition makes the automatic adjustment drive up the difficulty. Generated coins must wait 120 blocks to mature before they can be spent.", + "date": "January 9, 2009" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "16", + "text": "There are two ways to send money. If the recipient is online, you can enter their IP address and it will connect, get a new public key and send the transaction with comments. If the recipient is not online, it is possible to send to their Bitcoin address, which is a hash of their public key that they give you. They'll receive the transaction the next time they connect and get the block it's in. This method has the disadvantage that no comment information is sent, and a bit of privacy may be lost if the address is used multiple times, but it is a useful alternative if both users can't be online at the same time or the recipient can't receive incoming connections.", + "date": "January 9, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "email", + "email_id": "16", + "text": "Total circulation will be 21,000,000 coins. It'll be distributed to network nodes when they make blocks, with the amount cut in half every 4 years.\n\nfirst 4 years: 10,500,000 coins\nnext 4 years: 5,250,000 coins\nnext 4 years: 2,625,000 coins\nnext 4 years: 1,312,500 coins\netc...\n\nWhen that runs out, the system can support transaction fees if needed. 
It's based on open market competition, and there will probably always be nodes willing to process transactions for free.", + "date": "January 9, 2009" + }, + { + "category": "cryptocurrency", + "medium": "email", + "email_id": "17", + "text": "I would be surprised if 10 years from now we're not using electronic currency in some way, now that we know a way to do it that won't inevitably get dumbed down when the trusted third party gets cold feet.", + "date": "January 17, 2009" + }, + { + "category": "micropayments", + "medium": "email", + "email_id": "17", + "text": "It can already be used for pay-to-send e-mail. The send dialog is resizeable and you can enter as long of a message as you like. It's sent directly when it connects. The recipient doubleclicks on the transaction to see the full message. If someone famous is getting more e-mail than they can read, but would still like to have a way for fans to contact them, they could set up Bitcoin and give out the IP address on their website. \"Send X bitcoins to my priority hotline at this IP and I'll read the message personally.\"", + "date": "January 17, 2009" + }, + { + "category": "micropayments", + "medium": "email", + "email_id": "17", + "text": "Subscription sites that need some extra proof-of-work for their free trial so it doesn't cannibalize subscriptions could charge bitcoins for the trial.", + "date": "January 17, 2009" + }, + { + "category": "micropayments, bitcoin-economics", + "medium": "email", + "email_id": "17", + "text": "It might make sense just to get some in case it catches on. If enough people think the same way, that becomes a self fulfilling prophecy. 
Once it gets bootstrapped, there are so many applications if you could effortlessly pay a few cents to a website as easily as dropping coins in a vending machine.", + "date": "January 17, 2009" + }, + { + "category": "cryptocurrency", + "text": "A purely peer-to-peer version of electronic cash would allow online payments to be sent directly from one party to another without going through a financial institution.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work, double-spending", + "text": "We propose a solution to the double-spending problem using a peer-to-peer network. The network timestamps transactions by hashing them into an ongoing chain of hash-based proof-of-work, forming a record that cannot be changed without redoing the proof-of-work. The longest chain not only serves as proof of the sequence of events witnessed, but proof that it came from the largest pool of CPU power. As long as a majority of CPU power is controlled by nodes that are not cooperating to attack the network, they'll generate the longest chain and outpace attackers. The network itself requires minimal structure.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "trusted-third-parties", + "text": "Commerce on the Internet has come to rely almost exclusively on financial institutions serving as trusted third parties to process electronic payments. While the system works well enough for most transactions, it still suffers from the inherent weaknesses of the trust based model.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "trusted-third-parties", + "text": "Completely non-reversible transactions are not really possible, since financial institutions cannot avoid mediating disputes. 
The cost of mediation increases transaction costs, limiting the minimum practical transaction size and cutting off the possibility for small casual transactions, and there is a broader cost in the loss of ability to make non-reversible payments for non-reversible services. With the possibility of reversal, the need for trust spreads.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "trusted-third-parties, cryptocurrency", + "text": "What is needed is an electronic payment system based on cryptographic proof instead of trust, allowing any two willing parties to transact directly with each other without the need for a trusted third party. Transactions that are computationally impractical to reverse would protect sellers from fraud, and routine escrow mechanisms could easily be implemented to protect buyers.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "double-spending, proof-of-work", + "text": "In this paper, we propose a solution to the double-spending problem using a peer-to-peer distributed timestamp server to generate computational proof of the chronological order of transactions. The system is secure as long as honest nodes collectively control more CPU power than any cooperating group of attacker nodes.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions", + "text": "We define an electronic coin as a chain of digital signatures. Each owner transfers the coin to the next by digitally signing a hash of the previous transaction and the public key of the next owner and adding these to the end of the coin. A payee can verify the signatures to verify the chain of ownership.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "economics, double-spending", + "text": "The problem of course is the payee can't verify that one of the owners did not double-spend the coin. 
A common solution is to introduce a trusted central authority, or mint, that checks every transaction for double spending. After each transaction, the coin must be returned to the mint to issue a new coin, and only coins issued directly from the mint are trusted not to be double-spent. The problem with this solution is that the fate of the entire money system depends on the company running the mint, with every transaction having to go through them, just like a bank.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "nodes, cryptocurrency, transactions", + "text": "We need a way for the payee to know that the previous owners did not sign any earlier transactions. For our purposes, the earliest transaction is the one that counts, so we don't care about later attempts to double-spend. The only way to confirm the absence of a transaction is to be aware of all transactions. In the mint based model, the mint was aware of all transactions and decided which arrived first. To accomplish this without a trusted party, transactions must be publicly announced, and we need a system for participants to agree on a single history of the order in which they were received. The payee needs proof that at the time of each transaction, the majority of nodes agreed it was the first received.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions", + "text": "The solution we propose begins with a timestamp server. A timestamp server works by taking a hash of a block of items to be timestamped and widely publishing the hash, such as in a newspaper or Usenet post. The timestamp proves that the data must have existed at the time, obviously, in order to get into the hash. 
Each timestamp includes the previous timestamp in its hash, forming a chain, with each additional timestamp reinforcing the ones before it.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work", + "text": "To implement a distributed timestamp server on a peer-to-peer basis, we will need to use a proof-of-work system similar to Adam Back's Hashcash, rather than newspaper or Usenet posts. The proof-of-work involves scanning for a value that when hashed, such as with SHA-256, the hash begins with a number of zero bits. The average work required is exponential in the number of zero bits required and can be verified by executing a single hash.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work", + "text": "For our timestamp network, we implement the proof-of-work by incrementing a nonce in the block until a value is found that gives the block's hash the required zero bits. Once the CPU effort has been expended to make it satisfy the proof-of-work, the block cannot be changed without redoing the work. As later blocks are chained after it, the work to change the block would include redoing all the blocks after it.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work", + "text": "The proof-of-work also solves the problem of determining representation in majority decision making. If the majority were based on one-IP-address-one-vote, it could be subverted by anyone able to allocate many IPs. Proof-of-work is essentially one-CPU-one-vote. The majority decision is represented by the longest chain, which has the greatest proof-of-work effort invested in it. If a majority of CPU power is controlled by honest nodes, the honest chain will grow the fastest and outpace any competing chains. 
To modify a past block, an attacker would have to redo the proof-of-work of the block and all blocks after it and then catch up with and surpass the work of the honest nodes. We will show later that the probability of a slower attacker catching up diminishes exponentially as subsequent blocks are added.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work, difficulty", + "text": "To compensate for increasing hardware speed and varying interest in running nodes over time, the proof-of-work difficulty is determined by a moving average targeting an average number of blocks per hour. If they're generated too fast, the difficulty increases.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design, nodes, proof-of-work", + "text": "The steps to run the network are as follows:\n\n1. New transactions are broadcast to all nodes.\n2. Each node collects new transactions into a block.\n3. Each node works on finding a difficult proof-of-work for its block.\n4. When a node finds a proof-of-work, it broadcasts the block to all nodes.\n5. Nodes accept the block only if all transactions in it are valid and not already spent.\n6. Nodes express their acceptance of the block by working on creating the next block in the chain, using the hash of the accepted block as the previous hash.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "nodes, proof-of-work", + "text": "Nodes always consider the longest chain to be the correct one and will keep working on extending it. If two nodes broadcast different versions of the next block simultaneously, some nodes may receive one or the other first. In that case, they work on the first one they received, but save the other branch in case it becomes longer. 
The tie will be broken when the next proof-of-work is found and one branch becomes longer; the nodes that were working on the other branch will then switch to the longer one.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions", + "text": "New transaction broadcasts do not necessarily need to reach all nodes. As long as they reach many nodes, they will get into a block before long. Block broadcasts are also tolerant of dropped messages. If a node does not receive a block, it will request it when it receives the next block and realizes it missed one.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "mining, bitcoin-economics", + "text": "By convention, the first transaction in a block is a special transaction that starts a new coin owned by the creator of the block. This adds an incentive for nodes to support the network, and provides a way to initially distribute coins into circulation, since there is no central authority to issue them. The steady addition of a constant of amount of new coins is analogous to gold miners expending resources to add gold to circulation. In our case, it is CPU time and electricity that is expended.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "fees, bitcoin-economics", + "text": "The incentive can also be funded with transaction fees. If the output value of a transaction is less than its input value, the difference is a transaction fee that is added to the incentive value of the block containing the transaction. Once a predetermined number of coins have entered circulation, the incentive can transition entirely to transaction fees and be completely inflation free.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "mining, bitcoin-economics", + "text": "The incentive may help encourage nodes to stay honest. 
If a greedy attacker is able to assemble more CPU power than all the honest nodes, he would have to choose between using it to defraud people by stealing back his payments, or using it to generate new coins. He ought to find it more profitable to play by the rules, such rules that favour him with more new coins than everyone else combined, than to undermine the system and the validity of his own wealth.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design", + "text": "Once the latest transaction in a coin is buried under enough blocks, the spent transactions before it can be discarded to save disk space. To facilitate this without breaking the block's hash, transactions are hashed in a Merkle Tree, with only the root included in the block's hash. Old blocks can then be compacted by stubbing off branches of the tree. The interior hashes do not need to be stored.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design", + "text": "A block header with no transactions would be about 80 bytes. If we suppose blocks are generated every 10 minutes, 80 bytes * 6 * 24 * 365 = 4.2MB per year. With computer systems typically selling with 2GB of RAM as of 2008, and Moore's Law predicting current growth of 1.2GB per year, storage should not be a problem even if the block headers must be kept in memory.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design, nodes", + "text": "It is possible to verify payments without running a full network node. A user only needs to keep a copy of the block headers of the longest proof-of-work chain, which he can get by querying network nodes until he's convinced he has the longest chain, and obtain the Merkle branch linking the transaction to the block it's timestamped in. 
He can't check the transaction for himself, but by linking it to a place in the chain, he can see that a network node has accepted it, and blocks added after it further confirm the network has accepted it. \nAs such, the verification is reliable as long as honest nodes control the network, but is more vulnerable if the network is overpowered by an attacker. While network nodes can verify transactions for themselves, the simplified method can be fooled by an attacker's fabricated transactions for as long as the attacker can continue to overpower the network. One strategy to protect against this would be to accept alerts from network nodes when they detect an invalid block, prompting the user's software to download the full block and alerted transactions to confirm the inconsistency. Businesses that receive frequent payments will probably still want to run their own nodes for more independent security and quicker verification.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions, bitcoin-design", + "text": "Although it would be possible to handle coins individually, it would be unwieldy to make a separate transaction for every cent in a transfer. To allow value to be split and combined, transactions contain multiple inputs and outputs. Normally there will be either a single input from a larger previous transaction or multiple inputs combining smaller amounts, and at most two outputs: one for the payment, and one returning the change, if any, back to the sender.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions", + "text": "It should be noted that fan-out, where a transaction depends on several transactions, and those transactions depend on many more, is not a problem here. 
There is never the need to extract a complete standalone copy of a transaction's history.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions, privacy, trusted-third-parties", + "text": "The traditional banking model achieves a level of privacy by limiting access to information to the parties involved and the trusted third party. The necessity to announce all transactions publicly precludes this method, but privacy can still be maintained by breaking the flow of information in another place: by keeping public keys anonymous. The public can see that someone is sending an amount to someone else, but without information linking the transaction to anyone. This is similar to the level of information released by stock exchanges, where the time and size of individual trades, the \"tape\", is made public, but without telling who the parties were.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "addresses, privacy", + "text": "As an additional firewall, a new key pair should be used for each transaction to keep them from being linked to a common owner. Some linking is still unavoidable with multi-input transactions, which necessarily reveal that their inputs were owned by the same owner. The risk is that if the owner of a key is revealed, linking could reveal other transactions that belonged to the same owner.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "mining, proof-of-work", + "text": "We consider the scenario of an attacker trying to generate an alternate chain faster than the honest chain. Even if this is accomplished, it does not throw the system open to arbitrary changes, such as creating value out of thin air or taking money that never belonged to the attacker. Nodes are not going to accept an invalid transaction as payment, and honest nodes will never accept a block containing them. 
An attacker can only try to change one of his own transactions to take back money he recently spent.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design, trusted-third-parties", + "text": "We have proposed a system for electronic transactions without relying on trust. We started with the usual framework of coins made from digital signatures, which provides strong control of ownership, but is incomplete without a way to prevent double-spending. To solve this, we proposed a peer-to-peer network using proof-of-work to record a public history of transactions that quickly becomes computationally impractical for an attacker to change if honest nodes control a majority of CPU power.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "nodes, mining", + "text": "The network is robust in its unstructured simplicity. Nodes work all at once with little coordination. They do not need to be identified, since messages are not routed to any particular place and only need to be delivered on a best effort basis. Nodes can leave and rejoin the network at will, accepting the proof-of-work chain as proof of what happened while they were gone. They vote with their CPU power, expressing their acceptance of valid blocks by working on extending them and rejecting invalid blocks by refusing to work on them. 
Any needed rules and incentives can be enforced with this consensus mechanism.", + "medium": "whitepaper", + "date": "October 31, 2008" + } + ] \ No newline at end of file From 01681c3af21591f4100918ed8648d4469b895c4d Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sun, 2 Oct 2022 10:20:05 +0100 Subject: [PATCH 08/57] x --- lnbits/extensions/gerty/views_api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 952142dbe..bcf4c4271 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -258,8 +258,8 @@ async def get_exchange_rate(gerty): try: amount = await satoshis_amount_as_fiat(100000000, gerty.exchange) if amount: - price = ('{0} {1}').format(format_number(amount), gerty.exchange) - text.append(get_text_item_dict("Current BTC price", 15)) + price = format_number(amount) + text.append(get_text_item_dict("Current {0}/BTC price".format(gerty.exchange), 15)) text.append(get_text_item_dict(price, 80)) except: pass @@ -360,7 +360,7 @@ def format_number(number): def get_time_remaining(seconds, granularity=2): intervals = ( - ('weeks', 604800), # 60 * 60 * 24 * 7 + # ('weeks', 604800), # 60 * 60 * 24 * 7 ('days', 86400), # 60 * 60 * 24 ('hours', 3600), # 60 * 60 ('minutes', 60), From 1f660d669455f193ef0efc36d09596d476f92a17 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sun, 2 Oct 2022 11:25:07 +0100 Subject: [PATCH 09/57] Fix satoshi quotes bug --- lnbits/extensions/gerty/views_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index bcf4c4271..5845b1d5d 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -91,7 +91,7 @@ async def api_gerty_delete( async def api_gerty_satoshi(): with open(os.path.join(LNBITS_PATH, 'extensions/gerty/static/satoshi.json')) as fd: satoshiQuotes = 
json.load(fd) - return satoshiQuotes[random.randint(0, 100)] + return satoshiQuotes[random.randint(0, len(satoshiQuotes) - 1)] @gerty_ext.get("/api/v1/gerty/pieterwielliequote", status_code=HTTPStatus.OK) From 6f09136f736f147369bc0a387f45f69542610b9e Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sun, 2 Oct 2022 16:05:26 +0100 Subject: [PATCH 10/57] Trimmed satoshi quotes down to one with 5 lines for testing memory issues --- lnbits/extensions/gerty/static/satoshi.json | 257 +------------------- lnbits/extensions/gerty/views_api.py | 1 + 2 files changed, 4 insertions(+), 254 deletions(-) diff --git a/lnbits/extensions/gerty/static/satoshi.json b/lnbits/extensions/gerty/static/satoshi.json index b28697370..499f55a14 100644 --- a/lnbits/extensions/gerty/static/satoshi.json +++ b/lnbits/extensions/gerty/static/satoshi.json @@ -1,260 +1,9 @@ [ { - "category": "general", + "category": "bitcoin-economics, bitcoin-design", "medium": "bitcointalk", - "text": "It would have been nice to get this attention in any other context. 
WikiLeaks has kicked the hornet's nest, and the swarm is headed towards us.", - "post_id": "542", - "date": "December 11, 2010" - }, - { - "category": "bitcoin-economics", - "medium": "bitcointalk", - "text": "Bitcoins have no dividend or potential future dividend, therefore not like a stock.\n\nMore like a collectible or commodity.", - "post_id": "427", - "date": "August 27, 2010" - }, - { - "category": "difficulty", - "medium": "bitcointalk", - "text": "Difficulty just increased by 4 times, so now your cost is US$0.02/BTC.", - "post_id": "223", - "date": "July 16, 2010" - }, - { - "category": "nodes", - "medium": "bitcointalk", - "text": "At equilibrium size, many nodes will be server farms with one or two network nodes that feed the rest of the farm over a LAN.", - "post_id": "188", - "date": "July 14, 2010" - }, - { - "category": "general", - "medium": "bitcointalk", - "text": "Writing a description for this thing for general audiences is bloody hard. There's nothing to relate it to.", - "post_id": "167", - "date": "July 5, 2010" - }, - { - "category": "bitcoin-economics", - "medium": "bitcointalk", - "text": "Lost coins only make everyone else's coins worth slightly more. Think of it as a donation to everyone.", - "post_id": "131", - "date": "June 21, 2010" - }, - { - "category": "encryption", - "medium": "bitcointalk", - "text": "SHA-256 is very strong. It's not like the incremental step from MD5 to SHA1. It can last several decades unless there's some massive breakthrough attack.", - "post_id": "119", - "date": "June 14, 2010" - }, - { - "category": "releases", - "medium": "bitcointalk", - "text": "Does anyone want to translate the Bitcoin client itself? It would be great to have at least one other language in the 0.3 release.", - "post_id": "111", - "date": "May 26, 2010" - }, - { - "category": "bitcoin-design", - "medium": "bitcointalk", - "text": "How does everyone feel about the B symbol with the two lines through the outside? 
Can we live with that as our logo?", - "post_id": "83", - "date": "February 26, 2010" - }, - { - "category": "transactions", - "medium": "bitcointalk", - "text": "That would be nice at point-of-sale. The cash register displays a QR-code encoding a bitcoin address and amount on a screen and you photo it with your mobile.", - "post_id": "73", - "date": "February 24, 2010" - }, - { - "category": "bitcoin-economics", - "medium": "bitcointalk", - "text": "At the moment, generation effort is rapidly increasing, suggesting people are estimating the present value to be higher than the current cost of production.", - "post_id": "65", - "date": "February 21, 2010" - }, - { - "category": "bitcoin-economics", - "medium": "bitcointalk", - "text": "I'm sure that in 20 years there will either be very large transaction volume or no volume.", - "post_id": "57", - "date": "February 14, 2010" - }, - { - "category": "bitcoin-economics, fees", - "medium": "bitcointalk", - "text": "In a few decades when the reward gets too small, the transaction fee will become the main compensation for nodes.", - "post_id": "57", - "date": "February 14, 2010" - }, - { - "category": "nodes, mining, fees", - "medium": "bitcointalk", - "text": "If you're sad about paying the fee, you could always turn the tables and run a node yourself and maybe someday rake in a 0.44 fee yourself.", - "post_id": "56", - "date": "February 14, 2010" - }, - { - "category": "privacy", - "medium": "bitcointalk", - "text": "Bitcoin is still very new and has not been independently analysed. If you're serious about privacy, TOR is an advisable precaution.", - "post_id": "45", + "text": "Eventually at most only 21 million coins for 6.8 billion people in the world if it really gets huge.\n\nBut don't worry, there are another 6 decimal places that aren't shown, for a total of 8 decimal places internally. It shows 1.00 but internally it's 1.00000000. 
If there's massive deflation in the future, the software could show more decimal places.", + "post_id": "46", "date": "February 6, 2010" - }, - { - "category": "privacy", - "medium": "bitcointalk", - "text": "You could use TOR if you don't want anyone to know you're even using Bitcoin.", - "post_id": "45", - "date": "February 6, 2010" - }, - { - "category": "privacy, transactions", - "medium": "bitcointalk", - "text": "For greater privacy, it's best to use bitcoin addresses only once.", - "post_id": "11", - "date": "November 25, 2009" - }, - { - "category": "bitcoin-economics", - "medium": "p2pfoundation", - "text": "You could say coins are issued by the majority. They are issued in a limited, predetermined amount.", - "post_id": "3", - "date": "February 18, 2009" - }, - { - "category": "cryptocurrency", - "medium": "p2pfoundation", - "text": "With e-currency based on cryptographic proof, without the need to trust a third party middleman, money can be secure and transactions effortless.", - "post_id": "1", - "date": "February 11, 2009" - }, - { - "category": "identity", - "medium": "p2pfoundation", - "text": "I am not Dorian Nakamoto.", - "post_id": "4", - "date": "March 7, 2014" - }, - { - "category": "bitcoin-design", - "medium": "email", - "email_id": "1", - "text": "I've been working on a new electronic cash system that's fully peer-to-peer, with no trusted third party.", - "date": "November 1, 2008" - }, - { - "category": "nodes", - "medium": "email", - "email_id": "2", - "text": "Only people trying to create new coins would need to run network nodes.", - "date": "November 2, 2008" - }, - { - "category": "mining", - "medium": "email", - "email_id": "3", - "text": "The requirement is that the good guys collectively have more CPU proof-of-worker than any single attacker.", - "date": "November 3, 2008" - }, - { - "category": "mining", - "medium": "email", - "email_id": "3", - "text": "The Bitcoin network might actually reduce spam by diverting zombie farms to 
generating bitcoins instead.", - "date": "November 3, 2008" - }, - { - "category": "motives", - "medium": "email", - "email_id": "4", - "text": "Yes, but we can win a major battle in the arms race and gain a new territory of freedom for several years.", - "date": "November 7, 2008" - }, - { - "category": "p2p-networks, government", - "medium": "email", - "email_id": "4", - "text": "Governments are good at cutting off the heads of a centrally controlled networks like Napster, but pure P2P networks like Gnutella and Tor seem to be holding their own.", - "date": "November 7, 2008" - }, - { - "category": "transactions", - "medium": "email", - "email_id": "7", - "text": "It's not a problem if transactions have to wait one or a few extra cycles to get into a block.", - "date": "November 9, 2008" - }, - { - "category": "proof-of-work", - "medium": "email", - "email_id": "8", - "text": "The proof-of-work chain is the solution to the synchronisation problem, and to knowing what the globally shared view is without having to trust anyone.", - "date": "November 9, 2008" - }, - { - "category": "double-spending", - "medium": "email", - "email_id": "10", - "text": "When there are multiple double-spent versions of the same transaction, one and only one will become valid.", - "date": "November 11, 2008" - }, - { - "category": "double-spending", - "medium": "email", - "email_id": "10", - "text": "The receiver of a payment must wait an hour or so before believing that it's valid. 
The network will resolve any possible double-spend races by then.", - "date": "November 11, 2008" - }, - { - "category": "nodes", - "medium": "email", - "email_id": "10", - "text": "With the transaction fee based incentive system I recently posted, nodes would have an incentive to include all the paying transactions they receive.", - "date": "November 11, 2008" - }, - { - "category": "motives", - "medium": "email", - "email_id": "12", - "text": "It's very attractive to the libertarian viewpoint if we can explain it properly. I'm better with code than with words though.", - "date": "November 13, 2008" - }, - { - "category": "releases", - "medium": "email", - "email_id": "13", - "text": "I'll try and hurry up and release the sourcecode as soon as possible to serve as a reference to help clear up all these implementation questions.", - "date": "November 17, 2008" - }, - { - "category": "proof-of-work", - "medium": "email", - "email_id": "14", - "text": "The credential that establishes someone as real is the ability to supply CPU proof-of-worker.", - "date": "November 17, 2008" - }, - { - "category": "nodes", - "medium": "email", - "email_id": "16", - "text": "If you can keep a node running that accepts incoming connections, you'll really be helping the network a lot. 
Port 8333 on your firewall needs to be open to receive incoming connections.", - "date": "January 9, 2009" - }, - { - "category": "micropayments", - "medium": "email", - "email_id": "17", - "text": "Subscription sites that need some extra proof-of-work for their free trial so it doesn't cannibalize subscriptions could charge bitcoins for the trial.", - "date": "January 17, 2009" - }, - { - "category": "cryptocurrency", - "text": "A purely peer-to-peer version of electronic cash would allow online payments to be sent directly from one party to another without going through a financial institution.", - "medium": "whitepaper", - "date": "October 31, 2008" } ] \ No newline at end of file diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 5845b1d5d..d003e7bdd 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -286,6 +286,7 @@ def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int word_list = wrapper.wrap(text=text) multilineText = '\n'.join(word_list) + logger.debug("number of lines = {0}".format(len(word_list))) # logger.debug('multilineText') # logger.debug(multilineText) From 2e5a7b6a8654a83b089ef58b54b84eeb4b18af12 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sun, 2 Oct 2022 16:17:16 +0100 Subject: [PATCH 11/57] Reinstated all sat quotes and added temp max char limit in views_api for testing --- lnbits/extensions/gerty/static/satoshi.json | 1090 +++++++++++++++++++ lnbits/extensions/gerty/views_api.py | 13 +- 2 files changed, 1100 insertions(+), 3 deletions(-) diff --git a/lnbits/extensions/gerty/static/satoshi.json b/lnbits/extensions/gerty/static/satoshi.json index 499f55a14..1cff822aa 100644 --- a/lnbits/extensions/gerty/static/satoshi.json +++ b/lnbits/extensions/gerty/static/satoshi.json @@ -1,9 +1,1099 @@ [ + { + "category": "general", + "medium": "bitcointalk", + "text": "It would have been nice to get this attention in any other context. 
WikiLeaks has kicked the hornet's nest, and the swarm is headed towards us.", + "post_id": "542", + "date": "December 11, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "The project needs to grow gradually so the software can be strengthened along the way. I make this appeal to WikiLeaks not to try to use Bitcoin. Bitcoin is a small beta community in its infancy.", + "post_id": "523", + "date": "December 5, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "I'm happy if someone with artistic skill wants to contribute alternatives. The icon/logo was meant to be good as an icon at the 16x16 and 20x20 pixel sizes. I think it's the best program icon, but there's room for improvement at larger sizes for a graphic for use on websites. It'll be a lot simpler if authors could make their graphics public domain.", + "post_id": "500", + "date": "November 13, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "I wish rather than deleting the article, they put a length restriction. If something is not famous enough, there could at least be a stub article identifying what it is. I often come across annoying red links of things that Wiki ought to at least have heard of. \nThe article could be as simple as something like: \"Bitcoin is a peer-to-peer decentralised /link/electronic currency/link/.\" \nThe more standard Wiki thing to do is that we should have a paragraph in one of the more general categories that we are an instance of, like Electronic Currency or Electronic Cash. We can probably establish a paragraph there. Again, keep it short. Just identifying what it is.", + "post_id": "467", + "date": "September 30, 2010" + }, + { + "category": "transactions", + "medium": "bitcointalk", + "text": "As you figured out, the root problem is we shouldn't be counting or spending transactions until they have at least 1 confirmation. 
0/unconfirmed transactions are very much second class citizens. At most, they are advice that something has been received, but counting them as balance or spending them is premature.", + "post_id": "464", + "date": "September 30, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "Bitcoin would be convenient for people who don't have a credit card or don't want to use the cards they have, either don't want the spouse to see it on the bill or don't trust giving their number to \"porn guys\", or afraid of recurring billing.", + "post_id": "460", + "date": "September 23, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "I don't know anything about any of the bug trackers. If we were to have one, we would have to make a thoroughly researched choice. We're managing pretty well just using the forum. I'm more likely to see bugs posted in the forum, and I think other users are much more likely to help resolve and ask follow up questions here than if they were in a bug tracker. A key step is other users helping resolve the simple stuff that's not really a bug but some misunderstanding or confusion. I keep a list of all unresolved bugs I've seen on the forum. In some cases, I'm still thinking about the best design for the fix. This isn't the kind of software where we can leave so many unresolved bugs that we need a tracker for them.", + "post_id": "454", + "date": "September 19, 2010" + }, + { + "category": "scalability", + "medium": "bitcointalk", + "text": "The threshold can easily be changed in the future. We can decide to increase it when the time comes. It's a good idea to keep it lower as a circuit breaker and increase it as needed. If we hit the threshold now, it would almost certainly be some kind of flood and not actual use. 
Keeping the threshold lower would help limit the amount of wasted disk space in that event.", + "post_id": "441", + "date": "September 8, 2010" + }, + { + "category": "fees", + "medium": "bitcointalk", + "text": "Currently, paying a fee is controlled manually with the -paytxfee switch. It would be very easy to make the software automatically check the size of recent blocks to see if it should pay a fee. We're so far from reaching the threshold, we don't need that yet. It's a good idea to see how things go with controlling it manually first anyway.", + "post_id": "441", + "date": "September 8, 2010" + }, + { + "category": "fees, nodes", + "medium": "bitcointalk", + "text": "Another option is to reduce the number of free transactions allowed per block before transaction fees are required. Nodes only take so many KB of free transactions per block before they start requiring at least 0.01 transaction fee. The threshold should probably be lower than it currently is. I don't think the threshold should ever be 0. We should always allow at least some free transactions.", + "post_id": "439", + "date": "September 7, 2010" + }, + { + "category": "economics", + "medium": "bitcointalk", + "text": "As a thought experiment, imagine there was a base metal as scarce as gold but with the following properties:\n- boring grey in colour\n- not a good conductor of electricity\n- not particularly strong, but not ductile or easily malleable either\n- not useful for any practical or ornamental purpose\n\nand one special, magical property:\n- can be transported over a communications channel\n\nIf it somehow acquired any value at all for whatever reason, then anyone wanting to transfer wealth over a long distance could buy some, transmit it, and have the recipient sell it.\n\nMaybe it could get an initial value circularly as you've suggested, by people foreseeing its potential usefulness for exchange. 
(I would definitely want some) Maybe collectors, any random reason could spark it.\n\nI think the traditional qualifications for money were written with the assumption that there are so many competing objects in the world that are scarce, an object with the automatic bootstrap of intrinsic value will surely win out over those without intrinsic value. But if there were nothing in the world with intrinsic value that could be used as money, only scarce but no intrinsic value, I think people would still take up something.\n\n(I'm using the word scarce here to only mean limited potential supply)", + "post_id": "428", + "date": "August 27, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "Bitcoins have no dividend or potential future dividend, therefore not like a stock.\n\nMore like a collectible or commodity.", + "post_id": "427", + "date": "August 27, 2010" + }, + { + "category": "proof-of-work", + "medium": "bitcointalk", + "text": "There is no way for the software to automatically know if one chain is better than another except by the greatest proof-of-work. In the design it was necessary for it to switch to a longer chain no matter how far back it has to go.", + "post_id": "394", + "date": "August 16, 2010" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "Some places where generation will gravitate to: \n1) places where it's cheapest or free\n2) people who want to help for idealogical reasons\n3) people who want to get some coins without the inconvenience of doing a transaction to buy them\n\nThere are legitimate places where it's free. Generation is basically free anywhere that has electric heat, since your computer's heat is offsetting your baseboard electric heating. 
Many small flats have electric heat out of convenience.", + "post_id": "364", + "date": "August 15, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "Then you must also be against the common system of payment up front, where the customer loses.\nPayment up front: customer loses, and the thief gets the money.\nSimple escrow: customer loses, but the thief doesn't get the money either.\nAre you guys saying payment up front is better, because at least the thief gets the money, so at least someone gets it?\nImagine someone stole something from you. You can't get it back, but if you could, if it had a kill switch that could be remote triggered, would you do it? Would it be a good thing for thieves to know that everything you own has a kill switch and if they steal it, it'll be useless to them, although you still lose it too? If they give it back, you can re-activate it.\nImagine if gold turned to lead when stolen. If the thief gives it back, it turns to gold again.\nIt still seems to me the problem may be one of presenting it the right way. For one thing, not being so blunt about \"money burning\" for the purposes of game theory discussion. The money is never truly burned. You have the option to release it at any time forever.", + "post_id": "340", + "date": "August 11, 2010" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "The heat from your computer is not wasted if you need to heat your home. If you're using electric heat where you live, then your computer's heat isn't a waste. It's equal cost if you generate the heat with your computer. \nIf you have other cheaper heating than electric, then the waste is only the difference in cost.\nIf it's summer and you're using A/C, then it's twice. \nBitcoin generation should end up where it's cheapest. 
Maybe that will be in cold climates where there's electric heat, where it would be essentially free.", + "post_id": "337", + "date": "August 9, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "It's the same situation as gold and gold mining. The marginal cost of gold mining tends to stay near the price of gold. Gold mining is a waste, but that waste is far less than the utility of having gold available as a medium of exchange. \nI think the case will be the same for Bitcoin. The utility of the exchanges made possible by Bitcoin will far exceed the cost of electricity used. Therefore, not having Bitcoin would be the net waste.", + "post_id": "327", + "date": "August 7, 2010" + }, + { + "category": "proof-of-work", + "medium": "bitcointalk", + "text": "Proof-of-work has the nice property that it can be relayed through untrusted middlemen. We don't have to worry about a chain of custody of communication. It doesn't matter who tells you a longest chain, the proof-of-work speaks for itself.", + "post_id": "327", + "date": "August 7, 2010" + }, + { + "category": "micropayments", + "medium": "bitcointalk", + "text": "Forgot to add the good part about micropayments. While I don't think Bitcoin is practical for smaller micropayments right now, it will eventually be as storage and bandwidth costs continue to fall. If Bitcoin catches on on a big scale, it may already be the case by that time. Another way they can become more practical is if I implement client-only mode and the number of network nodes consolidates into a smaller number of professional server farms. Whatever size micropayments you need will eventually be practical. I think in 5 or 10 years, the bandwidth and storage will seem trivial.", + "post_id": "318", + "date": "August 5, 2010" + }, + { + "category": "micropayments", + "medium": "bitcointalk", + "text": "Bitcoin isn't currently practical for very small micropayments. 
Not for things like pay per search or per page view without an aggregating mechanism, not things needing to pay less than 0.01. The dust spam limit is a first try at intentionally trying to prevent overly small micropayments like that. \nBitcoin is practical for smaller transactions than are practical with existing payment methods. Small enough to include what you might call the top of the micropayment range. But it doesn't claim to be practical for arbitrarily small micropayments.", + "post_id": "317", + "date": "August 4, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "Actually, it works well to just PM me. I'm the one who's going to be fixing it. If you find a security flaw, I would definitely like to hear from you privately to fix it before it goes public.", + "post_id": "294", + "date": "July 29, 2010" + }, + { + "category": "nodes", + "medium": "bitcointalk", + "text": "The current system where every user is a network node is not the intended configuration for large scale. That would be like every Usenet user runs their own NNTP server. The design supports letting users just be users. The more burden it is to run a node, the fewer nodes there will be. Those few nodes will be big server farms. The rest will be client nodes that only do transactions and don't generate.", + "post_id": "287", + "date": "July 29, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "For future reference, here's my public key. It's the same one that's been there since the bitcoin.org site first went up in 2008. Grab it now in case you need it later. http://www.bitcoin.org/Satoshi_Nakamoto.asc", + "post_id": "276", + "date": "July 25, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "By making some adjustments to the database settings, I was able to make the initial block download about 5 times faster. It downloads in about 30 minutes. 
\n \nThe database default had it writing each block to disk synchronously, which is not necessary. I changed the settings to let it cache the changes in memory and write them out in a batch. Blocks are still written transactionally, so either the complete change occurs or none of it does, in either case the data is left in a valid state. \n \nI only enabled this change during the initial block download. When you come within 2000 blocks of the latest block, these changes turn off and it slows down to the old way.", + "post_id": "258", + "date": "July 23, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "The timing is strange, just as we are getting a rapid increase in 3rd party coverage after getting slashdotted. I hope there's not a big hurry to wrap the discussion and decide. How long does Wikipedia typically leave a question like that open for comment? \nIt would help to condense the article and make it less promotional sounding as soon as possible. Just letting people know what it is, where it fits into the electronic money space, not trying to convince them that it's good. They probably want something that just generally identifies what it is, not tries to explain all about how it works.", + "post_id": "249", + "date": "July 10, 2010" + }, + { + "category": "difficulty", + "medium": "bitcointalk", + "text": "Right, the difficulty adjustment is trying to keep it so the network as a whole generates an average of 6 blocks per hour. The time for your block to mature will always be around 20 hours.", + "post_id": "225", + "date": "July 16, 2010" + }, + { + "category": "difficulty", + "medium": "bitcointalk", + "text": "Difficulty just increased by 4 times, so now your cost is US$0.02/BTC.", + "post_id": "223", + "date": "July 16, 2010" + }, + { + "category": "scalability, nodes", + "medium": "bitcointalk", + "text": "The design outlines a lightweight client that does not need the full block chain. 
In the design PDF it's called Simplified Payment Verification. The lightweight client can send and receive transactions, it just can't generate blocks. It does not need to trust a node to verify payments, it can still verify them itself. \nThe lightweight client is not implemented yet, but the plan is to implement it when it's needed. For now, everyone just runs a full network node.", + "post_id": "188", + "date": "July 14, 2010" + }, + { + "category": "scalability, nodes", + "medium": "bitcointalk", + "text": "I anticipate there will never be more than 100K nodes, probably less. It will reach an equilibrium where it's not worth it for more nodes to join in. The rest will be lightweight clients, which could be millions.", + "post_id": "188", + "date": "July 14, 2010" + }, + { + "category": "nodes", + "medium": "bitcointalk", + "text": "At equilibrium size, many nodes will be server farms with one or two network nodes that feed the rest of the farm over a LAN.", + "post_id": "188", + "date": "July 14, 2010" + }, + { + "category": "economics", + "medium": "bitcointalk", + "text": "When someone tries to buy all the world's supply of a scarce asset, the more they buy the higher the price goes. At some point, it gets too expensive for them to buy any more. It's great for the people who owned it beforehand because they get to sell it to the corner at crazy high prices. As the price keeps going up and up, some people keep holding out for yet higher prices and refuse to sell.", + "post_id": "174", + "date": "July 9, 2010" + }, + { + "category": "releases", + "medium": "bitcointalk", + "text": "Announcing version 0.3 of Bitcoin, the P2P cryptocurrency! Bitcoin is a digital currency using cryptography and a distributed network to replace the need for a trusted central server. Escape the arbitrary inflation risk of centrally managed currencies! Bitcoin's total circulation is limited to 21 million coins. 
The coins are gradually released to the network's nodes based on the CPU proof-of-worker they contribute, so you can get a share of them by contributing your idle CPU time.", + "post_id": "168", + "date": "July 6, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "Writing a description for this thing for general audiences is bloody hard. There's nothing to relate it to.", + "post_id": "167", + "date": "July 5, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "Lost coins only make everyone else's coins worth slightly more. Think of it as a donation to everyone.", + "post_id": "131", + "date": "June 21, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "Excellent choice of a first project, nice work. I had planned to do this exact thing if someone else didn't do it, so when it gets too hard for mortals to generate 50BTC, new users could get some coins to play with right away. Donations should be able to keep it filled. The display showing the balance in the dispenser encourages people to top it up.\n\nYou should put a donation bitcoin address on the page for those who want to add funds to it, which ideally should update to a new address whenever it receives something.", + "post_id": "129", + "date": "June 18, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "Since 2007. At some point I became convinced there was a way to do this without any trust required at all and couldn't resist to keep thinking about it. Much more of the work was designing than coding.\n\nFortunately, so far all the issues raised have been things I previously considered and planned for.", + "post_id": "127", + "date": "June 18, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "The nature of Bitcoin is such that once version 0.1 was released, the core design was set in stone for the rest of its lifetime. 
Because of that, I wanted to design it to support every possible transaction type I could think of. The problem was, each thing required special support code and data fields whether it was used or not, and only covered one special case at a time. It would have been an explosion of special cases. The solution was script, which generalizes the problem so transacting parties can describe their transaction as a predicate that the node network evaluates. The nodes only need to understand the transaction to the extent of evaluating whether the sender's conditions are met.", + "post_id": "126", + "date": "June 17, 2010" + }, + { + "category": "transactions, bitcoin-design", + "medium": "bitcointalk", + "text": "The design supports a tremendous variety of possible transaction types that I designed years ago. Escrow transactions, bonded contracts, third party arbitration, multi-party signature, etc. If Bitcoin catches on in a big way, these are things we'll want to explore in the future, but they all had to be designed at the beginning to make sure they would be possible later.", + "post_id": "126", + "date": "June 17, 2010" + }, + { + "category": "encryption", + "medium": "bitcointalk", + "text": "SHA-256 is very strong. It's not like the incremental step from MD5 to SHA1. It can last several decades unless there's some massive breakthrough attack.", + "post_id": "119", + "date": "June 14, 2010" + }, + { + "category": "encryption", + "medium": "bitcointalk", + "text": "If SHA-256 became completely broken, I think we could come to some agreement about what the honest block chain was before the trouble started, lock that in and continue from there with a new hash function.", + "post_id": "119", + "date": "June 14, 2010" + }, + { + "category": "releases", + "medium": "bitcointalk", + "text": "Does anyone want to translate the Bitcoin client itself? 
It would be great to have at least one other language in the 0.3 release.", + "post_id": "111", + "date": "May 26, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "Simplified Payment Verification is for lightweight client-only users who only do transactions and don't generate and don't participate in the node network. They wouldn't need to download blocks, just the hash chain, which is currently about 2MB and very quick to verify (less than a second to verify the whole chain). If the network becomes very large, like over 100,000 nodes, this is what we'll use to allow common users to do transactions without being full blown nodes. At that stage, most users should start running client-only software and only the specialist server farms keep running full network nodes, kind of like how the usenet network has consolidated. \nSPV is not implemented yet, and won't be implemented until far in the future, but all the current implementation is designed around supporting it.", + "post_id": "105", + "date": "May 18, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "Bitcoin addresses you generate are kept forever. A bitcoin address must be kept to show ownership of anything sent to it. If you were able to delete a bitcoin address and someone sent to it, the money would be lost. They're only about 500 bytes.", + "post_id": "102", + "date": "May 16, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "When you generate a new bitcoin address, it only takes disk space on your own computer (like 500 bytes). It's like generating a new PGP private key, but less CPU intensive because it's ECC. The address space is effectively unlimited. 
It doesn't hurt anyone, so generate all you want.", + "post_id": "98", + "date": "May 16, 2010" + }, + { + "category": "general", + "medium": "bitcointalk", + "text": "The price of .com registrations is lower than it should be, therefore any good name you might think of is always already taken by some domain name speculator. Fortunately, it's standard for open source projects to be .org.", + "post_id": "94", + "date": "March 23, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "How does everyone feel about the B symbol with the two lines through the outside? Can we live with that as our logo?", + "post_id": "83", + "date": "February 26, 2010" + }, + { + "category": "transactions", + "medium": "bitcointalk", + "text": "That would be nice at point-of-sale. The cash register displays a QR-code encoding a bitcoin address and amount on a screen and you photo it with your mobile.", + "post_id": "73", + "date": "February 24, 2010" + }, + { + "category": "economics", + "medium": "bitcointalk", + "text": "A rational market price for something that is expected to increase in value will already reflect the present value of the expected future increases. In your head, you do a probability estimate balancing the odds that it keeps increasing.", + "post_id": "65", + "date": "February 21, 2010" + }, + { + "category": "economics, bitcoin-economics", + "medium": "bitcointalk", + "text": "The price of any commodity tends to gravitate toward the production cost. If the price is below cost, then production slows down. If the price is above cost, profit can be made by generating and selling more. 
At the same time, the increased production would increase the difficulty, pushing the cost of generating towards the price.", + "post_id": "65", + "date": "February 21, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "At the moment, generation effort is rapidly increasing, suggesting people are estimating the present value to be higher than the current cost of production.", + "post_id": "65", + "date": "February 21, 2010" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "I'm sure that in 20 years there will either be very large transaction volume or no volume.", + "post_id": "57", + "date": "February 14, 2010" + }, + { + "category": "bitcoin-economics, fees", + "medium": "bitcointalk", + "text": "In a few decades when the reward gets too small, the transaction fee will become the main compensation for nodes.", + "post_id": "57", + "date": "February 14, 2010" + }, + { + "category": "nodes, mining, fees", + "medium": "bitcointalk", + "text": "If you're sad about paying the fee, you could always turn the tables and run a node yourself and maybe someday rake in a 0.44 fee yourself.", + "post_id": "56", + "date": "February 14, 2010" + }, { "category": "bitcoin-economics, bitcoin-design", "medium": "bitcointalk", "text": "Eventually at most only 21 million coins for 6.8 billion people in the world if it really gets huge.\n\nBut don't worry, there are another 6 decimal places that aren't shown, for a total of 8 decimal places internally. It shows 1.00 but internally it's 1.00000000. If there's massive deflation in the future, the software could show more decimal places.", "post_id": "46", "date": "February 6, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "If it gets tiresome working with small numbers, we could change where the display shows the decimal point. Same amount of money, just different convention for where the \",\"'s and \".\"'s go. e.g. 
moving the decimal place 3 places would mean if you had 1.00000 before, now it shows it as 1,000.00.", + "post_id": "46", + "date": "February 6, 2010" + }, + { + "category": "privacy", + "medium": "bitcointalk", + "text": "Bitcoin is still very new and has not been independently analysed. If you're serious about privacy, TOR is an advisable precaution.", + "post_id": "45", + "date": "February 6, 2010" + }, + { + "category": "privacy", + "medium": "bitcointalk", + "text": "You could use TOR if you don't want anyone to know you're even using Bitcoin.", + "post_id": "45", + "date": "February 6, 2010" + }, + { + "category": "bitcoin-design", + "medium": "bitcointalk", + "text": "I very much wanted to find some way to include a short message, but the problem is, the whole world would be able to see the message. As much as you may keep reminding people that the message is completely non-private, it would be an accident waiting to happen.", + "post_id": "33", + "date": "January 28, 2010" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "The average total coins generated across the network per day stays the same. Faster machines just get a larger share than slower machines. If everyone bought faster machines, they wouldn't get more coins than before.", + "post_id": "20", + "date": "December 12, 2009" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "We should have a gentleman's agreement to postpone the GPU arms race as long as we can for the good of the network. It's much easer to get new users up to speed if they don't have to worry about GPU drivers and compatibility. It's nice how anyone with just a CPU can compete fairly equally right now.", + "post_id": "20", + "date": "December 12, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "bitcointalk", + "text": "Those coins can never be recovered, and the total circulation is less. 
Since the effective circulation is reduced, all the remaining coins are worth slightly more. It's the opposite of when a government prints money and the value of existing money goes down.", + "post_id": "17", + "date": "December 10, 2009" + }, + { + "category": "trusted-third-parties", + "text": "Being open source means anyone can independently review the code. If it was closed source, nobody could verify the security. I think it's essential for a program of this nature to be open source.", + "medium": "bitcointalk", + "post_id": "17", + "date": "December 10, 2009" + }, + { + "category": "privacy, transactions", + "medium": "bitcointalk", + "text": "For greater privacy, it's best to use bitcoin addresses only once.", + "post_id": "11", + "date": "November 25, 2009" + }, + { + "category": "mining", + "medium": "bitcointalk", + "text": "Think of it as a cooperative effort to make a chain. When you add a link, you must first find the current end of the chain. If you were to locate the last link, then go off for an hour and forge your link, come back and link it to the link that was the end an hour ago, others may have added several links since then and they're not going to want to use your link that now branches off the middle.", + "post_id": "8", + "date": "November 22, 2009" + }, + { + "category": "bitcoin-design", + "medium": "p2pfoundation", + "text": "It is a global distributed database, with additions to the database by consent of the majority, based on a set of rules they follow: \n\n- Whenever someone finds proof-of-work to generate a block, they get some new coins\n- The proof-of-work difficulty is adjusted every two weeks to target an average of 6 blocks per hour (for the whole network)\n- The coins given per block is cut in half every 4 years", + "post_id": "3", + "date": "February 18, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "p2pfoundation", + "text": "You could say coins are issued by the majority. 
They are issued in a limited, predetermined amount.", + "post_id": "3", + "date": "February 18, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "p2pfoundation", + "text": "To Sepp's question, indeed there is nobody to act as central bank or federal reserve to adjust the money supply as the population of users grows. That would have required a trusted party to determine the value, because I don't know a way for software to know the real world value of things.", + "post_id": "3", + "date": "February 18, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "p2pfoundation", + "text": "In this sense, it's more typical of a precious metal. Instead of the supply changing to keep the value the same, the supply is predetermined and the value changes. As the number of users grows, the value per coin increases. It has the potential for a positive feedback loop; as users increase, the value goes up, which could attract more users to take advantage of the increasing value.", + "post_id": "3", + "date": "February 18, 2009" + }, + { + "category": "cryptocurrency", + "medium": "p2pfoundation", + "text": "A lot of people automatically dismiss e-currency as a lost cause because of all the companies that failed since the 1990's. I hope it's obvious it was only the centrally controlled nature of those systems that doomed them. I think this is the first time we're trying a decentralized, non-trust-based system.", + "post_id": "2", + "date": "February 15, 2009" + }, + { + "category": "releases, bitcoin-design", + "medium": "p2pfoundation", + "text": "I've developed a new open source P2P e-cash system called Bitcoin. It's completely decentralized, with no central server or trusted parties, because everything is based on crypto proof instead of trust. 
Give it a try, or take a look at the screenshots and design paper: \n\nDownload Bitcoin v0.1 at http://www.bitcoin.org", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "economics", + "medium": "p2pfoundation", + "text": "The root problem with conventional currency is all the trust that's required to make it work. The central bank must be trusted not to debase the currency, but the history of fiat currencies is full of breaches of that trust.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "micropayments, privacy, banks", + "medium": "p2pfoundation", + "text": "Banks must be trusted to hold our money and transfer it electronically, but they lend it out in waves of credit bubbles with barely a fraction in reserve. We have to trust them with our privacy, trust them not to let identity thieves drain our accounts. Their massive overhead costs make micropayments impossible.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "encryption", + "medium": "p2pfoundation", + "text": "A generation ago, multi-user time-sharing computer systems had a similar problem. Before strong encryption, users had to rely on password protection to secure their files, placing trust in the system administrator to keep their information private. Privacy could always be overridden by the admin based on his judgment call weighing the principle of privacy against other concerns, or at the behest of his superiors. Then strong encryption became available to the masses, and trust was no longer required. 
Data could be secured in a way that was physically impossible for others to access, no matter for what reason, no matter how good the excuse, no matter what.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "cryptocurrency", + "medium": "p2pfoundation", + "text": "With e-currency based on cryptographic proof, without the need to trust a third party middleman, money can be secure and transactions effortless.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "transactions", + "medium": "p2pfoundation", + "text": "A digital coin contains the public key of its owner. To transfer it, the owner signs the coin together with the public key of the next owner. Anyone can check the signatures to verify the chain of ownership.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "double-spending", + "medium": "p2pfoundation", + "text": "Any owner could try to re-spend an already spent coin by signing it again to another owner. The usual solution is for a trusted company with a central database to check for double-spending, but that just gets back to the trust model. In its central position, the company can override the users, and the fees needed to support the company make micropayments impractical. \nBitcoin's solution is to use a peer-to-peer network to check for double-spending. In a nutshell, the network works like a distributed timestamp server, stamping the first transaction to spend a coin. It takes advantage of the nature of information being easy to spread but hard to stifle.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "bitcoin-design", + "medium": "p2pfoundation", + "text": "The result is a distributed system with no single point of failure. 
Users hold the crypto keys to their own money and transact directly with each other, with the help of the P2P network to check for double-spending.", + "post_id": "1", + "date": "February 11, 2009" + }, + { + "category": "identity", + "medium": "p2pfoundation", + "text": "I am not Dorian Nakamoto.", + "post_id": "4", + "date": "March 7, 2014" + }, + { + "category": "bitcoin-design", + "medium": "email", + "email_id": "1", + "text": "I've been working on a new electronic cash system that's fully peer-to-peer, with no trusted third party.", + "date": "November 1, 2008" + }, + { + "category": "bitcoin-design", + "medium": "email", + "email_id": "1", + "text": "The main properties: \n Double-spending is prevented with a peer-to-peer network.\n No mint or other trusted parties.\n Participants can be anonymous.\n New coins are made from Hashcash style proof-of-work.\n The proof-of-work for new coin generation also powers the network to prevent double-spending.", + "date": "November 1, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "2", + "text": "Long before the network gets anywhere near as large as that, it would be safe for users to use Simplified Payment Verification (section 8) to check for double spending, which only requires having the chain of block headers, or about 12KB per day.", + "date": "November 2, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "2", + "text": "Only people trying to create new coins would need to run network nodes.", + "date": "November 2, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "2", + "text": "At first, most users would run network nodes, but as the network grows beyond a certain point, it would be left more and more to specialists with server farms of specialized hardware. 
A server farm would only need to have one node on the network and the rest of the LAN connects with that one node.", + "date": "November 2, 2008" + }, + { + "category": "mining", + "medium": "email", + "email_id": "3", + "text": "The requirement is that the good guys collectively have more CPU power than any single attacker.", + "date": "November 3, 2008" + }, + { + "category": "mining", + "medium": "email", + "email_id": "3", + "text": "There would be many smaller zombie farms that are not big enough to overpower the network, and they could still make money by generating bitcoins. The smaller farms are then the \"honest nodes\". (I need a better term than \"honest\") The more smaller farms resort to generating bitcoins, the higher the bar gets to overpower the network, making larger farms also too small to overpower it so that they may as well generate bitcoins too. According to the \"long tail\" theory, the small, medium and merely large farms put together should add up to a lot more than the biggest zombie farm.", + "date": "November 3, 2008" + }, + { + "category": "mining", + "medium": "email", + "email_id": "3", + "text": "Even if a bad guy does overpower the network, it's not like he's instantly rich. All he can accomplish is to take back money he himself spent, like bouncing a check. To exploit it, he would have to buy something from a merchant, wait till it ships, then overpower the network and try to take his money back. I don't think he could make as much money trying to pull a carding scheme like that as he could by generating bitcoins. 
With a zombie farm that big, he could generate more bitcoins than everyone else combined.", + "date": "November 3, 2008" + }, + { + "category": "mining", + "medium": "email", + "email_id": "3", + "text": "The Bitcoin network might actually reduce spam by diverting zombie farms to generating bitcoins instead.", + "date": "November 3, 2008" + }, + { + "category": "motives", + "medium": "email", + "email_id": "4", + "text": "Yes, but we can win a major battle in the arms race and gain a new territory of freedom for several years.", + "date": "November 7, 2008" + }, + { + "category": "p2p-networks, government", + "medium": "email", + "email_id": "4", + "text": "Governments are good at cutting off the heads of a centrally controlled networks like Napster, but pure P2P networks like Gnutella and Tor seem to be holding their own.", + "date": "November 7, 2008" + }, + { + "category": "mining, difficulty", + "medium": "email", + "email_id": "5", + "text": "As computers get faster and the total computing power applied to creating bitcoins increases, the difficulty increases proportionally to keep the total new production constant. Thus, it is known in advance how many new bitcoins will be created every year in the future.", + "date": "November 8, 2008" + }, + { + "category": "bitcoin-economics", + "medium": "email", + "email_id": "5", + "text": "The fact that new coins are produced means the money supply increases by a planned amount, but this does not necessarily result in inflation. If the supply of money increases at the same rate that the number of people using it increases, prices remain stable. If it does not increase as fast as demand, there will be deflation and early holders of money will see its value increase. 
Coins have to get initially distributed somehow, and a constant rate seems like the best formula.", + "date": "November 8, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "6", + "text": "Right, nodes keep transactions in their working set until they get into a block. If a transaction reaches 90% of nodes, then each time a new block is found, it has a 90% chance of being in it.", + "date": "November 9, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "6", + "text": "Receivers of transactions will normally need to hold transactions for perhaps an hour or more to allow time for this kind of possibility to be resolved. They can still re-spend the coins immediately, but they should wait before taking an action such as shipping goods.", + "date": "November 9, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "6", + "text": "The attacker isn't adding blocks to the end. He has to go back and redo the block his transaction is in and all the blocks after it, as well as any new blocks the network keeps adding to the end while he's doing that. He's rewriting history. Once his branch is longer, it becomes the new valid one.", + "date": "November 9, 2008" + }, + { + "category": "nodes, mining, proof-of-work", + "medium": "email", + "email_id": "6", + "text": "It is strictly necessary that the longest chain is always considered the valid one. Nodes that were present may remember that one branch was there first and got replaced by another, but there would be no way for them to convince those who were not present of this. We can't have subfactions of nodes that cling to one branch that they think was first, others that saw another branch first, and others that joined later and never saw what happened. The CPU power proof-of-work vote must have the final say. 
The only way for everyone to stay on the same page is to believe that the longest chain is always the valid one, no matter what.", + "date": "November 9, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "6", + "text": "The recipient just needs to verify it back to a depth that is sufficiently far back in the block chain, which will often only require a depth of 2 transactions. All transactions before that can be discarded.", + "date": "November 9, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "6", + "text": "When a node receives a block, it checks the signatures of every transaction in it against previous transactions in blocks. Blocks can only contain transactions that depend on valid transactions in previous blocks or the same block. Transaction C could depend on transaction B in the same block and B depends on transaction A in an earlier block.", + "date": "November 9, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "7", + "text": "It's not a problem if transactions have to wait one or a few extra cycles to get into a block.", + "date": "November 9, 2008" + }, + { + "category": "proof-of-work", + "medium": "email", + "email_id": "8", + "text": "The proof-of-work chain is the solution to the synchronisation problem, and to knowing what the globally shared view is without having to trust anyone.", + "date": "November 9, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "8", + "text": "A transaction will quickly propagate throughout the network, so if two versions of the same transaction were reported at close to the same time, the one with the head start would have a big advantage in reaching many more nodes first. Nodes will only accept the first one they see, refusing the second one to arrive, so the earlier transaction would have many more nodes working on incorporating it into the next proof-of-work. 
In effect, each node votes for its viewpoint of which transaction it saw first by including it in its proof-of-work effort. If the transactions did come at exactly the same time and there was an even split, it's a toss up based on which gets into a proof-of-work first, and that decides which is valid.", + "date": "November 9, 2008" + }, + { + "category": "nodes, proof-of-work", + "medium": "email", + "email_id": "8", + "text": "When a node finds a proof-of-work, the new block is propagated throughout the network and everyone adds it to the chain and starts working on the next block after it. Any nodes that had the other transaction will stop trying to include it in a block, since it's now invalid according to the accepted chain.", + "date": "November 9, 2008" + }, + { + "category": "proof-of-work", + "medium": "email", + "email_id": "8", + "text": "The proof-of-work chain is itself self-evident proof that it came from the globally shared view. Only the majority of the network together has enough CPU power to generate such a difficult chain of proof-of-work. Any user, upon receiving the proof-of-work chain, can see what the majority of the network has approved. Once a transaction is hashed into a link that's a few links back in the chain, it is firmly etched into the global history.", + "date": "November 9, 2008" + }, + { + "category": "fees, bitcoin-economics", + "medium": "email", + "email_id": "9", + "text": "If you're having trouble with the inflation issue, it's easy to tweak it for transaction fees instead. It's as simple as this: let the output value from any transaction be 1 cent less than the input value. Either the client software automatically writes transactions for 1 cent more than the intended payment value, or it could come out of the payee's side. 
The incentive value when a node finds a proof-of-work for a block could be the total of the fees in the block.", + "date": "November 10, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "10", + "text": "When there are multiple double-spent versions of the same transaction, one and only one will become valid.", + "date": "November 11, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "10", + "text": "The receiver of a payment must wait an hour or so before believing that it's valid. The network will resolve any possible double-spend races by then.", + "date": "November 11, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "10", + "text": "The guy who received the double-spend that became invalid never thought he had it in the first place. His software would have shown the transaction go from \"unconfirmed\" to \"invalid\". If necessary, the UI can be made to hide transactions until they're sufficiently deep in the block chain.", + "date": "November 11, 2008" + }, + { + "category": "difficulty", + "medium": "email", + "email_id": "10", + "text": "The target time between blocks will probably be 10 minutes. Every block includes its creation time. If the time is off by more than 36 hours, other nodes won't work on it. If the timespan over the last 6*24*30 blocks is less than 15 days, blocks are being generated too fast and the proof-of-work difficulty doubles. Everyone does the same calculation with the same chain data, so they all get the same result at the same link in the chain.", + "date": "November 11, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "10", + "text": "Instantant non-repudiability is not a feature, but it's still much faster than existing systems. Paper cheques can bounce up to a week or two later. Credit card transactions can be contested up to 60 to 180 days later. 
Bitcoin transactions can be sufficiently irreversible in an hour or two.", + "date": "November 11, 2008" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "10", + "text": "With the transaction fee based incentive system I recently posted, nodes would have an incentive to include all the paying transactions they receive.", + "date": "November 11, 2008" + }, + { + "category": "proof-of-work", + "medium": "email", + "email_id": "11", + "text": "The proof-of-work chain is a solution to the Byzantine Generals' Problem. I'll try to rephrase it in that context.\nA number of Byzantine Generals each have a computer and want to attack the King's wi-fi by brute forcing the password, which they've learned is a certain number of characters in length. Once they stimulate the network to generate a packet, they must crack the password within a limited time to break in and erase the logs, otherwise they will be discovered and get in trouble. They only have enough CPU power to crack it fast enough if a majority of them attack at the same time. \n They don't particularly care when the attack will be, just that they all agree. It has been decided that anyone who feels like it will announce a time, and whatever time is heard first will be the official attack time. The problem is that the network is not instantaneous, and if two generals announce different attack times at close to the same time, some may hear one first and others hear the other first. They use a proof-of-work chain to solve the problem. Once each general receives whatever attack time he hears first, he sets his computer to solve an extremely difficult proof-of-work problem that includes the attack time in its hash. The proof-of-work is so difficult, it's expected to take 10 minutes of them all working at once before one of them finds a solution. 
Once one of the generals finds a proof-of-work, he broadcasts it to the network, and everyone changes their current proof-of-work computation to include that proof-of-work in the hash they're working on. If anyone was working on a different attack time, they switch to this one, because its proof-of-work chain is now longer.\n After two hours, one attack time should be hashed by a chain of 12 proofs-of-work. Every general, just by verifying the difficulty of the proof-of-work chain, can estimate how much parallel CPU power per hour was expended on it and see that it must have required the majority of the computers to produce that much proof-of-work in the allotted time. They had to all have seen it because the proof-of-work is proof that they worked on it. If the CPU power exhibited by the proof-of-work chain is sufficient to crack the password, they can safely attack at the agreed time.\n The proof-of-work chain is how all the synchronisation, distributed database and global view problems you've asked about are solved.", + "date": "November 13, 2008" + }, + { + "category": "nodes, mining", + "medium": "email", + "email_id": "12", + "text": "Broadcasts will probably be almost completely reliable. TCP transmissions are rarely ever dropped these days, and the broadcast protocol has a retry mechanism to get the data from other nodes after a while. If broadcasts turn out to be slower in practice than expected, the target time between blocks may have to be increased to avoid wasting resources. We want blocks to usually propagate in much less time than it takes to generate them, otherwise nodes would spend too much time working on obsolete blocks.", + "date": "November 14, 2008" + }, + { + "category": "motives", + "medium": "email", + "email_id": "12", + "text": "It's very attractive to the libertarian viewpoint if we can explain it properly. 
I'm better with code than with words though.", + "date": "November 13, 2008" + }, + { + "category": "releases", + "medium": "email", + "email_id": "13", + "text": "I'll try and hurry up and release the sourcecode as soon as possible to serve as a reference to help clear up all these implementation questions.", + "date": "November 17, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "13", + "text": "A basic transaction is just what you see in the figure in section 2. A signature (of the buyer) satisfying the public key of the previous transaction, and a new public key (of the seller) that must be satisfied to spend it the next time.", + "date": "November 17, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "13", + "text": "There's no need for reporting of \"proof of double spending\" like that. If the same chain contains both spends, then the block is invalid and rejected. \n Same if a block didn't have enough proof-of-work. That block is invalid and rejected. There's no need to circulate a report about it. Every node could see that and reject it before relaying it.", + "date": "November 17, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "13", + "text": "We're not \"on the lookout\" for double spends to sound the alarm and catch the cheater. We merely adjudicate which one of the spends is valid. Receivers of transactions must wait a few blocks to make sure that resolution has had time to complete. Would be cheaters can try and simultaneously double-spend all they want, and all they accomplish is that within a few blocks, one of the spends becomes valid and the others become invalid. 
Any later double-spends are immediately rejected once there's already a spend in the main chain.", + "date": "November 17, 2008" + }, + { + "category": "proof-of-work, mining", + "medium": "email", + "email_id": "13", + "text": "The proof-of-work is a Hashcash style SHA-256 collision finding. It's a memoryless process where you do millions of hashes a second, with a small chance of finding one each time. The 3 or 4 fastest nodes' dominance would only be proportional to their share of the total CPU power. Anyone's chance of finding a solution at any time is proportional to their CPU power.", + "date": "November 17, 2008" + }, + { + "category": "bitcoin-economics", + "medium": "email", + "email_id": "13", + "text": "There will be transaction fees, so nodes will have an incentive to receive and include all the transactions they can. Nodes will eventually be compensated by transaction fees alone when the total coins created hits the pre-determined ceiling.", + "date": "November 17, 2008" + }, + { + "category": "proof-of-work", + "medium": "email", + "email_id": "14", + "text": "The credential that establishes someone as real is the ability to supply CPU power.", + "date": "November 17, 2008" + }, + { + "category": "double-spending", + "medium": "email", + "email_id": "14", + "text": "The race is to spread your transaction on the network first. Think 6 degrees of freedom -- it spreads exponentially. It would only take something like 2 minutes for a transaction to spread widely enough that a competitor starting late would have little chance of grabbing very many nodes before the first one is overtaking the whole network. During those 2 minutes, the merchant's nodes can be watching for a double-spent transaction. The double-spender would not be able to blast his alternate transaction out to the world without the merchant getting it, so he has to wait before starting. 
\n If the real transaction reaches 90% and the double-spent tx reaches 10%, the double-spender only gets a 10% chance of not paying, and 90% chance his money gets spent. For almost any type of goods, that's not going to be worth it for the scammer.", + "date": "November 17, 2008" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "14", + "text": "If a merchant actually has a problem with theft, they can make the customer wait 2 minutes, or wait for something in e-mail, which many already do. If they really want to optimize, and it's a large download, they could cancel the download in the middle if the transaction comes back double-spent. If it's website access, typically it wouldn't be a big deal to let the customer have access for 5 minutes and then cut off access if it's rejected. Many such sites have a free trial anyway.", + "date": "November 17, 2008" + }, + { + "category": "releases, bitcoin-design", + "medium": "email", + "email_id": "15", + "text": "I believe I've worked through all those little details over the last year and a half while coding it, and there were a lot of them. The functional details are not covered in the paper, but the sourcecode is coming soon. I sent you the main files. (available by request at the moment, full release soon)", + "date": "November 17, 2008" + }, + { + "category": "releases", + "medium": "email", + "email_id": "16", + "text": "Announcing the first release of Bitcoin, a new electronic cash system that uses a peer-to-peer network to prevent double-spending. It's completely decentralized with no server or central authority.", + "date": "January 9, 2009" + }, + { + "category": "nodes", + "medium": "email", + "email_id": "16", + "text": "If you can keep a node running that accepts incoming connections, you'll really be helping the network a lot. 
Port 8333 on your firewall needs to be open to receive incoming connections.", + "date": "January 9, 2009" + }, + { + "category": "mining", + "medium": "email", + "email_id": "16", + "text": "You can get coins by getting someone to send you some, or turn on Options->Generate Coins to run a node and generate blocks. I made the proof-of-work difficulty ridiculously easy to start with, so for a little while in the beginning a typical PC will be able to generate coins in just a few hours. It'll get a lot harder when competition makes the automatic adjustment drive up the difficulty. Generated coins must wait 120 blocks to mature before they can be spent.", + "date": "January 9, 2009" + }, + { + "category": "transactions", + "medium": "email", + "email_id": "16", + "text": "There are two ways to send money. If the recipient is online, you can enter their IP address and it will connect, get a new public key and send the transaction with comments. If the recipient is not online, it is possible to send to their Bitcoin address, which is a hash of their public key that they give you. They'll receive the transaction the next time they connect and get the block it's in. This method has the disadvantage that no comment information is sent, and a bit of privacy may be lost if the address is used multiple times, but it is a useful alternative if both users can't be online at the same time or the recipient can't receive incoming connections.", + "date": "January 9, 2009" + }, + { + "category": "bitcoin-economics", + "medium": "email", + "email_id": "16", + "text": "Total circulation will be 21,000,000 coins. It'll be distributed to network nodes when they make blocks, with the amount cut in half every 4 years.\n\nfirst 4 years: 10,500,000 coins\nnext 4 years: 5,250,000 coins\nnext 4 years: 2,625,000 coins\nnext 4 years: 1,312,500 coins\netc...\n\nWhen that runs out, the system can support transaction fees if needed. 
It's based on open market competition, and there will probably always be nodes willing to process transactions for free.", + "date": "January 9, 2009" + }, + { + "category": "cryptocurrency", + "medium": "email", + "email_id": "17", + "text": "I would be surprised if 10 years from now we're not using electronic currency in some way, now that we know a way to do it that won't inevitably get dumbed down when the trusted third party gets cold feet.", + "date": "January 17, 2009" + }, + { + "category": "micropayments", + "medium": "email", + "email_id": "17", + "text": "It can already be used for pay-to-send e-mail. The send dialog is resizeable and you can enter as long of a message as you like. It's sent directly when it connects. The recipient doubleclicks on the transaction to see the full message. If someone famous is getting more e-mail than they can read, but would still like to have a way for fans to contact them, they could set up Bitcoin and give out the IP address on their website. \"Send X bitcoins to my priority hotline at this IP and I'll read the message personally.\"", + "date": "January 17, 2009" + }, + { + "category": "micropayments", + "medium": "email", + "email_id": "17", + "text": "Subscription sites that need some extra proof-of-work for their free trial so it doesn't cannibalize subscriptions could charge bitcoins for the trial.", + "date": "January 17, 2009" + }, + { + "category": "micropayments, bitcoin-economics", + "medium": "email", + "email_id": "17", + "text": "It might make sense just to get some in case it catches on. If enough people think the same way, that becomes a self fulfilling prophecy. 
Once it gets bootstrapped, there are so many applications if you could effortlessly pay a few cents to a website as easily as dropping coins in a vending machine.", + "date": "January 17, 2009" + }, + { + "category": "cryptocurrency", + "text": "A purely peer-to-peer version of electronic cash would allow online payments to be sent directly from one party to another without going through a financial institution.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work, double-spending", + "text": "We propose a solution to the double-spending problem using a peer-to-peer network. The network timestamps transactions by hashing them into an ongoing chain of hash-based proof-of-work, forming a record that cannot be changed without redoing the proof-of-work. The longest chain not only serves as proof of the sequence of events witnessed, but proof that it came from the largest pool of CPU proof-of-worker. As long as a majority of CPU proof-of-worker is controlled by nodes that are not cooperating to attack the network, they'll generate the longest chain and outpace attackers. The network itself requires minimal structure.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "trusted-third-parties", + "text": "Commerce on the Internet has come to rely almost exclusively on financial institutions serving as trusted third parties to process electronic payments. While the system works well enough for most transactions, it still suffers from the inherent weaknesses of the trust based model.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "trusted-third-parties", + "text": "Completely non-reversible transactions are not really possible, since financial institutions cannot avoid mediating disputes. 
The cost of mediation increases transaction costs, limiting the minimum practical transaction size and cutting off the possibility for small casual transactions, and there is a broader cost in the loss of ability to make non-reversible payments for non-reversible services. With the possibility of reversal, the need for trust spreads.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "trusted-third-parties, cryptocurrency", + "text": "What is needed is an electronic payment system based on cryptographic proof instead of trust, allowing any two willing parties to transact directly with each other without the need for a trusted third party. Transactions that are computationally impractical to reverse would protect sellers from fraud, and routine escrow mechanisms could easily be implemented to protect buyers.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "double-spending, proof-of-work", + "text": "In this paper, we propose a solution to the double-spending problem using a peer-to-peer distributed timestamp server to generate computational proof of the chronological order of transactions. The system is secure as long as honest nodes collectively control more CPU proof-of-worker than any cooperating group of attacker nodes.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions", + "text": "We define an electronic coin as a chain of digital signatures. Each owner transfers the coin to the next by digitally signing a hash of the previous transaction and the public key of the next owner and adding these to the end of the coin. A payee can verify the signatures to verify the chain of ownership.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "economics, double-spending", + "text": "The problem of course is the payee can't verify that one of the owners did not double-spend the coin. 
A common solution is to introduce a trusted central authority, or mint, that checks every transaction for double spending. After each transaction, the coin must be returned to the mint to issue a new coin, and only coins issued directly from the mint are trusted not to be double-spent. The problem with this solution is that the fate of the entire money system depends on the company running the mint, with every transaction having to go through them, just like a bank.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "nodes, cryptocurrency, transactions", + "text": "We need a way for the payee to know that the previous owners did not sign any earlier transactions. For our purposes, the earliest transaction is the one that counts, so we don't care about later attempts to double-spend. The only way to confirm the absence of a transaction is to be aware of all transactions. In the mint based model, the mint was aware of all transactions and decided which arrived first. To accomplish this without a trusted party, transactions must be publicly announced, and we need a system for participants to agree on a single history of the order in which they were received. The payee needs proof that at the time of each transaction, the majority of nodes agreed it was the first received.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions", + "text": "The solution we propose begins with a timestamp server. A timestamp server works by taking a hash of a block of items to be timestamped and widely publishing the hash, such as in a newspaper or Usenet post. The timestamp proves that the data must have existed at the time, obviously, in order to get into the hash. 
Each timestamp includes the previous timestamp in its hash, forming a chain, with each additional timestamp reinforcing the ones before it.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work", + "text": "To implement a distributed timestamp server on a peer-to-peer basis, we will need to use a proof-of-work system similar to Adam Back's Hashcash, rather than newspaper or Usenet posts. The proof-of-work involves scanning for a value that when hashed, such as with SHA-256, the hash begins with a number of zero bits. The average work required is exponential in the number of zero bits required and can be verified by executing a single hash.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work", + "text": "For our timestamp network, we implement the proof-of-work by incrementing a nonce in the block until a value is found that gives the block's hash the required zero bits. Once the CPU effort has been expended to make it satisfy the proof-of-work, the block cannot be changed without redoing the work. As later blocks are chained after it, the work to change the block would include redoing all the blocks after it.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work", + "text": "The proof-of-work also solves the problem of determining representation in majority decision making. If the majority were based on one-IP-address-one-vote, it could be subverted by anyone able to allocate many IPs. Proof-of-work is essentially one-CPU-one-vote. The majority decision is represented by the longest chain, which has the greatest proof-of-work effort invested in it. If a majority of CPU proof-of-worker is controlled by honest nodes, the honest chain will grow the fastest and outpace any competing chains. 
To modify a past block, an attacker would have to redo the proof-of-work of the block and all blocks after it and then catch up with and surpass the work of the honest nodes. We will show later that the probability of a slower attacker catching up diminishes exponentially as subsequent blocks are added.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "proof-of-work, difficulty", + "text": "To compensate for increasing hardware speed and varying interest in running nodes over time, the proof-of-work difficulty is determined by a moving average targeting an average number of blocks per hour. If they're generated too fast, the difficulty increases.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design, nodes, proof-of-work", + "text": "The steps to run the network are as follows:\n\n1. New transactions are broadcast to all nodes.\n2. Each node collects new transactions into a block.\n3. Each node works on finding a difficult proof-of-work for its block.\n4. When a node finds a proof-of-work, it broadcasts the block to all nodes.\n5. Nodes accept the block only if all transactions in it are valid and not already spent.\n6. Nodes express their acceptance of the block by working on creating the next block in the chain, using the hash of the accepted block as the previous hash.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "nodes, proof-of-work", + "text": "Nodes always consider the longest chain to be the correct one and will keep working on extending it. If two nodes broadcast different versions of the next block simultaneously, some nodes may receive one or the other first. In that case, they work on the first one they received, but save the other branch in case it becomes longer. 
The tie will be broken when the next proof-of-work is found and one branch becomes longer; the nodes that were working on the other branch will then switch to the longer one.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions", + "text": "New transaction broadcasts do not necessarily need to reach all nodes. As long as they reach many nodes, they will get into a block before long. Block broadcasts are also tolerant of dropped messages. If a node does not receive a block, it will request it when it receives the next block and realizes it missed one.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "mining, bitcoin-economics", + "text": "By convention, the first transaction in a block is a special transaction that starts a new coin owned by the creator of the block. This adds an incentive for nodes to support the network, and provides a way to initially distribute coins into circulation, since there is no central authority to issue them. The steady addition of a constant of amount of new coins is analogous to gold miners expending resources to add gold to circulation. In our case, it is CPU time and electricity that is expended.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "fees, bitcoin-economics", + "text": "The incentive can also be funded with transaction fees. If the output value of a transaction is less than its input value, the difference is a transaction fee that is added to the incentive value of the block containing the transaction. Once a predetermined number of coins have entered circulation, the incentive can transition entirely to transaction fees and be completely inflation free.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "mining, bitcoin-economics", + "text": "The incentive may help encourage nodes to stay honest. 
If a greedy attacker is able to assemble more CPU proof-of-worker than all the honest nodes, he would have to choose between using it to defraud people by stealing back his payments, or using it to generate new coins. He ought to find it more profitable to play by the rules, such rules that favour him with more new coins than everyone else combined, than to undermine the system and the validity of his own wealth.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design", + "text": "Once the latest transaction in a coin is buried under enough blocks, the spent transactions before it can be discarded to save disk space. To facilitate this without breaking the block's hash, transactions are hashed in a Merkle Tree, with only the root included in the block's hash. Old blocks can then be compacted by stubbing off branches of the tree. The interior hashes do not need to be stored.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design", + "text": "A block header with no transactions would be about 80 bytes. If we suppose blocks are generated every 10 minutes, 80 bytes * 6 * 24 * 365 = 4.2MB per year. With computer systems typically selling with 2GB of RAM as of 2008, and Moore's Law predicting current growth of 1.2GB per year, storage should not be a problem even if the block headers must be kept in memory.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design, nodes", + "text": "It is possible to verify payments without running a full network node. A user only needs to keep a copy of the block headers of the longest proof-of-work chain, which he can get by querying network nodes until he's convinced he has the longest chain, and obtain the Merkle branch linking the transaction to the block it's timestamped in. 
He can't check the transaction for himself, but by linking it to a place in the chain, he can see that a network node has accepted it, and blocks added after it further confirm the network has accepted it. \nAs such, the verification is reliable as long as honest nodes control the network, but is more vulnerable if the network is overproof-of-workered by an attacker. While network nodes can verify transactions for themselves, the simplified method can be fooled by an attacker's fabricated transactions for as long as the attacker can continue to overproof-of-worker the network. One strategy to protect against this would be to accept alerts from network nodes when they detect an invalid block, prompting the user's software to download the full block and alerted transactions to confirm the inconsistency. Businesses that receive frequent payments will probably still want to run their own nodes for more independent security and quicker verification.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions, bitcoin-design", + "text": "Although it would be possible to handle coins individually, it would be unwieldy to make a separate transaction for every cent in a transfer. To allow value to be split and combined, transactions contain multiple inputs and outputs. Normally there will be either a single input from a larger previous transaction or multiple inputs combining smaller amounts, and at most two outputs: one for the payment, and one returning the change, if any, back to the sender.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions", + "text": "It should be noted that fan-out, where a transaction depends on several transactions, and those transactions depend on many more, is not a problem here. 
There is never the need to extract a complete standalone copy of a transaction's history.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "transactions, privacy, trusted-third-parties", + "text": "The traditional banking model achieves a level of privacy by limiting access to information to the parties involved and the trusted third party. The necessity to announce all transactions publicly precludes this method, but privacy can still be maintained by breaking the flow of information in another place: by keeping public keys anonymous. The public can see that someone is sending an amount to someone else, but without information linking the transaction to anyone. This is similar to the level of information released by stock exchanges, where the time and size of individual trades, the \"tape\", is made public, but without telling who the parties were.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "addresses, privacy", + "text": "As an additional firewall, a new key pair should be used for each transaction to keep them from being linked to a common owner. Some linking is still unavoidable with multi-input transactions, which necessarily reveal that their inputs were owned by the same owner. The risk is that if the owner of a key is revealed, linking could reveal other transactions that belonged to the same owner.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "mining, proof-of-work", + "text": "We consider the scenario of an attacker trying to generate an alternate chain faster than the honest chain. Even if this is accomplished, it does not throw the system open to arbitrary changes, such as creating value out of thin air or taking money that never belonged to the attacker. Nodes are not going to accept an invalid transaction as payment, and honest nodes will never accept a block containing them. 
An attacker can only try to change one of his own transactions to take back money he recently spent.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "bitcoin-design, trusted-third-parties", + "text": "We have proposed a system for electronic transactions without relying on trust. We started with the usual framework of coins made from digital signatures, which provides strong control of ownership, but is incomplete without a way to prevent double-spending. To solve this, we proposed a peer-to-peer network using proof-of-work to record a public history of transactions that quickly becomes computationally impractical for an attacker to change if honest nodes control a majority of CPU proof-of-worker.", + "medium": "whitepaper", + "date": "October 31, 2008" + }, + { + "category": "nodes, mining", + "text": "The network is robust in its unstructured simplicity. Nodes work all at once with little coordination. They do not need to be identified, since messages are not routed to any particular place and only need to be delivered on a best effort basis. Nodes can leave and rejoin the network at will, accepting the proof-of-work chain as proof of what happened while they were gone. They vote with their CPU proof-of-worker, expressing their acceptance of valid blocks by working on extending them and rejecting invalid blocks by refusing to work on them. 
Any needed rules and incentives can be enforced with this consensus mechanism.", + "medium": "whitepaper", + "date": "October 31, 2008" } ] \ No newline at end of file diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index d003e7bdd..865971cdf 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -89,10 +89,16 @@ async def api_gerty_delete( @gerty_ext.get("/api/v1/gerty/satoshiquote", status_code=HTTPStatus.OK) async def api_gerty_satoshi(): + maxQuoteLength = 353; with open(os.path.join(LNBITS_PATH, 'extensions/gerty/static/satoshi.json')) as fd: satoshiQuotes = json.load(fd) - return satoshiQuotes[random.randint(0, len(satoshiQuotes) - 1)] - + quote = satoshiQuotes[random.randint(0, len(satoshiQuotes) - 1)] + # logger.debug(quote.text) + if len(quote["text"]) > maxQuoteLength: + logger.debug("Quote is too long, getting another") + return await api_gerty_satoshi() + else: + return quote @gerty_ext.get("/api/v1/gerty/pieterwielliequote", status_code=HTTPStatus.OK) async def api_gerty_wuille(): @@ -284,9 +290,10 @@ def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int # wrap the text wrapper = textwrap.TextWrapper(width=line_width) word_list = wrapper.wrap(text=text) + logger.debug("number of chars = {0}".format(len(text))) multilineText = '\n'.join(word_list) - logger.debug("number of lines = {0}".format(len(word_list))) + # logger.debug("number of lines = {0}".format(len(word_list))) # logger.debug('multilineText') # logger.debug(multilineText) From ce398d26c30e95d060e29dce0a19eb34897c1b1b Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sun, 2 Oct 2022 16:23:20 +0100 Subject: [PATCH 12/57] decrease sat char count to 180 --- lnbits/extensions/gerty/views_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 865971cdf..7bc97af50 100644 --- 
a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -89,7 +89,7 @@ async def api_gerty_delete( @gerty_ext.get("/api/v1/gerty/satoshiquote", status_code=HTTPStatus.OK) async def api_gerty_satoshi(): - maxQuoteLength = 353; + maxQuoteLength = 186; with open(os.path.join(LNBITS_PATH, 'extensions/gerty/static/satoshi.json')) as fd: satoshiQuotes = json.load(fd) quote = satoshiQuotes[random.randint(0, len(satoshiQuotes) - 1)] @@ -293,7 +293,7 @@ def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int logger.debug("number of chars = {0}".format(len(text))) multilineText = '\n'.join(word_list) - # logger.debug("number of lines = {0}".format(len(word_list))) + logger.debug("number of lines = {0}".format(len(word_list))) # logger.debug('multilineText') # logger.debug(multilineText) From 5a2aa8b42b02a15302e0fff264374b84c99baffb Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sun, 2 Oct 2022 16:23:59 +0100 Subject: [PATCH 13/57] sat quote font size to 15 --- lnbits/extensions/gerty/views_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 7bc97af50..486162c5e 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -242,7 +242,7 @@ async def get_satoshi_quotes(): quote = await api_gerty_satoshi() if quote: if quote['text']: - text.append(get_text_item_dict(quote['text'], 12)) + text.append(get_text_item_dict(quote['text'], 15)) if quote['date']: text.append(get_text_item_dict("Satoshi Nakamoto - {0}".format(quote['date']), 15)) return text From 72b953b354bd9ab23d29bb3d6921e1ac66e0f9e6 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Mon, 3 Oct 2022 17:18:53 +0100 Subject: [PATCH 14/57] Got basic dashboard layout working --- .../gerty/templates/gerty/index.html | 6 ++++ lnbits/extensions/gerty/views_api.py | 29 +++++++++++++++++-- 2 files changed, 33 insertions(+), 2 
deletions(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index a59bf15d8..d341ce987 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -175,6 +175,11 @@

Use the toggles below to control what your Gerty will display

+ + = enabled_screen_count) else p + 1; @@ -170,6 +171,8 @@ async def get_screen_text(screen_num: int, screens_list: dict, gerty): logger.debug('screen_slug') logger.debug(screen_slug) # text = [] + if screen_slug == "dashboard": + text = await get_dashboard(gerty) if screen_slug == "lnbits_wallets_balance": text = await get_lnbits_wallet_balances(gerty) elif screen_slug == "fun_satoshi_quotes": @@ -208,6 +211,28 @@ async def get_screen_text(screen_num: int, screens_list: dict, gerty): text = await get_placeholder_text() return text +# Get the dashboard screen +async def get_dashboard(gerty): + text = [] + # XC rate + text.append(get_text_item_dict("19,255", 40, 145, 161)) + text.append(get_text_item_dict("BTCUSD price", 15, 155, 199)) + # balance + text.append(get_text_item_dict("Alice's wallet balance", 15, 524, 50)) + text.append(get_text_item_dict("102,101", 40, 524, 126)) + text.append(get_text_item_dict("Bob's wallet balance", 15, 524, 211)) + text.append(get_text_item_dict("102", 40, 524, 286)) + + # Mempool fees + text.append(get_text_item_dict("756,885", 40, 115, 416)) + text.append(get_text_item_dict("Current block height", 15, 115, 456)) + + # difficulty adjustment time + text.append(get_text_item_dict("7 days, 2 hours, 0 minutes", 15, 514, 390)) + text.append(get_text_item_dict("until next difficulty adjustment", 12, 514, 420)) + + return text + async def get_lnbits_wallet_balances(gerty): # Get Wallet info @@ -290,10 +315,10 @@ def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int # wrap the text wrapper = textwrap.TextWrapper(width=line_width) word_list = wrapper.wrap(text=text) - logger.debug("number of chars = {0}".format(len(text))) + # logger.debug("number of chars = {0}".format(len(text))) multilineText = '\n'.join(word_list) - logger.debug("number of lines = {0}".format(len(word_list))) + # logger.debug("number of lines = {0}".format(len(word_list))) # logger.debug('multilineText') # logger.debug(multilineText) From 
46daf57cacb3f56277f6bbe43c0b7a37c2f6e80b Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Tue, 4 Oct 2022 10:52:53 +0100 Subject: [PATCH 15/57] Moved text into an areas list --- lnbits/extensions/gerty/views_api.py | 104 ++++++++++++++++++--------- 1 file changed, 70 insertions(+), 34 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 87cd4f139..d564c6c5a 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -154,7 +154,7 @@ async def api_gerty_json( "screen": { "slug": get_screen_slug_by_index(p, enabled_screens), "group": get_screen_slug_by_index(p, enabled_screens), - "text": text + "areas": text } } @@ -170,68 +170,77 @@ async def get_screen_text(screen_num: int, screens_list: dict, gerty): # first get the relevant slug from the display_preferences logger.debug('screen_slug') logger.debug(screen_slug) - # text = [] if screen_slug == "dashboard": - text = await get_dashboard(gerty) + areas = await get_dashboard(gerty) if screen_slug == "lnbits_wallets_balance": - text = await get_lnbits_wallet_balances(gerty) + areas = await get_lnbits_wallet_balances(gerty) elif screen_slug == "fun_satoshi_quotes": - text = await get_satoshi_quotes() + areas = await get_satoshi_quotes() elif screen_slug == "fun_pieter_wuille_facts": - text = await get_pieter_wuille_fact() + areas = await get_pieter_wuille_fact() elif screen_slug == "fun_exchange_market_rate": - text = await get_exchange_rate(gerty) + areas = await get_exchange_rate(gerty) elif screen_slug == "onchain_difficulty_epoch_progress": - text = await get_onchain_stat(screen_slug, gerty) + areas = await get_onchain_stat(screen_slug, gerty) elif screen_slug == "onchain_difficulty_retarget_date": - text = await get_onchain_stat(screen_slug, gerty) + areas = await get_onchain_stat(screen_slug, gerty) elif screen_slug == "onchain_difficulty_blocks_remaining": - text = await get_onchain_stat(screen_slug, gerty) + areas = await 
get_onchain_stat(screen_slug, gerty) elif screen_slug == "onchain_difficulty_epoch_time_remaining": - text = await get_onchain_stat(screen_slug, gerty) + areas = await get_onchain_stat(screen_slug, gerty) elif screen_slug == "mempool_recommended_fees": - text = await get_placeholder_text() + areas = await get_placeholder_text() elif screen_slug == "mempool_tx_count": - text = await get_mempool_stat(screen_slug, gerty) + areas = await get_mempool_stat(screen_slug, gerty) elif screen_slug == "mining_current_hash_rate": - text = await get_placeholder_text() + areas = await get_placeholder_text() elif screen_slug == "mining_current_difficulty": - text = await get_placeholder_text() + areas = await get_placeholder_text() elif screen_slug == "lightning_channel_count": - text = await get_placeholder_text() + areas = await get_placeholder_text() elif screen_slug == "lightning_node_count": - text = await get_placeholder_text() + areas = await get_placeholder_text() elif screen_slug == "lightning_tor_node_count": - text = await get_placeholder_text() + areas = await get_placeholder_text() elif screen_slug == "lightning_clearnet_nodes": - text = await get_placeholder_text() + areas = await get_placeholder_text() elif screen_slug == "lightning_unannounced_nodes": - text = await get_placeholder_text() + areas = await get_placeholder_text() elif screen_slug == "lightning_average_channel_capacity": - text = await get_placeholder_text() - return text + areas = await get_placeholder_text() + + return areas # Get the dashboard screen async def get_dashboard(gerty): - text = [] + screens = [] # XC rate - text.append(get_text_item_dict("19,255", 40, 145, 161)) - text.append(get_text_item_dict("BTCUSD price", 15, 155, 199)) + text = [] + amount = await satoshis_amount_as_fiat(100000000, gerty.exchange) + text.append(get_text_item_dict(format_number(amount), 40, 145, 161)) + text.append(get_text_item_dict("BTC{0} price".format(gerty.exchange), 15, 155, 199)) + screens.append(text) # 
balance + text = [] text.append(get_text_item_dict("Alice's wallet balance", 15, 524, 50)) text.append(get_text_item_dict("102,101", 40, 524, 126)) text.append(get_text_item_dict("Bob's wallet balance", 15, 524, 211)) text.append(get_text_item_dict("102", 40, 524, 286)) + screens.append(text) # Mempool fees - text.append(get_text_item_dict("756,885", 40, 115, 416)) + text = [] + text.append(get_text_item_dict(format_number(await get_block_height(gerty)), 40, 115, 416)) text.append(get_text_item_dict("Current block height", 15, 115, 456)) + screens.append(text) # difficulty adjustment time - text.append(get_text_item_dict("7 days, 2 hours, 0 minutes", 15, 514, 390)) + text = [] + text.append(get_text_item_dict(await get_time_remaining_next_difficulty_adjustment(gerty), 15, 514, 390)) text.append(get_text_item_dict("until next difficulty adjustment", 12, 514, 420)) + screens.append(text) - return text + return screens async def get_lnbits_wallet_balances(gerty): @@ -244,13 +253,13 @@ async def get_lnbits_wallet_balances(gerty): wallet = await get_wallet_for_key(key=lnbits_wallet) logger.debug(wallet) if wallet: - wallets.append({ - "name": wallet.name, - "balance": wallet.balance_msat, - "inkey": wallet.inkey, - }) + # wallets.append({ + # "name": wallet.name, + # "balance": wallet.balance_msat, + # "inkey": wallet.inkey, + # }) text.append(get_text_item_dict(wallet.name, 20)) - text.append(get_text_item_dict(wallet.balance, 40)) + text.append(get_text_item_dict(format_number(wallet.balance_msat), 40)) return text @@ -365,6 +374,33 @@ async def get_onchain_stat(stat_slug: str, gerty): text.append(get_text_item_dict(get_time_remaining(stat / 1000, 4), 20)) return text + +async def get_time_remaining_next_difficulty_adjustment(gerty): + if isinstance(gerty.mempool_endpoint, str): + async with httpx.AsyncClient() as client: + r = await client.get(gerty.mempool_endpoint + "/api/v1/difficulty-adjustment") + stat = r.json()['remainingTime'] + time = get_time_remaining(stat 
/ 1000, 3) + return time + +async def get_block_height(gerty): + if isinstance(gerty.mempool_endpoint, str): + async with httpx.AsyncClient() as client: + r = await client.get(gerty.mempool_endpoint + "/api/blocks/tip/height") + + return r.json() + +async def get_mempool_recommended_fees(gerty): + if isinstance(gerty.mempool_endpoint, str): + async with httpx.AsyncClient() as client: + r = await client.get(gerty.mempool_endpoint + "/api/v1/fees/recommended") + return { + "high": r.fastestFee, + "medium": r.halfHourFee, + "low": r.economyFee, + } + + async def get_mempool_stat(stat_slug: str, gerty): text = [] if isinstance(gerty.mempool_endpoint, str): From 0a63576a6ac43155cdb38d99487ba9732b5f70d7 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Tue, 4 Oct 2022 10:54:34 +0100 Subject: [PATCH 16/57] Added refresh time --- lnbits/extensions/gerty/views_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index d564c6c5a..1560ca817 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -146,7 +146,7 @@ async def api_gerty_json( return { "settings": { "refreshTime": gerty.refresh_time, - "requestTimestamp": round(time.time()), + "requestTimestamp": datetime.fromtimestamp(time.time()).strftime("%e %b %Y at %H:%M"), "nextScreenNumber": next_screen_number, "showTextBoundRect": False, "name": gerty.name From 0f0d24a7ecd5c090c90f915a531c5ce714fc8937 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Tue, 4 Oct 2022 19:08:24 +0100 Subject: [PATCH 17/57] move non dashboard data into areas list --- lnbits/extensions/gerty/views_api.py | 69 ++++++++++++++-------------- 1 file changed, 35 insertions(+), 34 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 1560ca817..ebe8d72db 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -170,77 +170,78 
@@ async def get_screen_text(screen_num: int, screens_list: dict, gerty): # first get the relevant slug from the display_preferences logger.debug('screen_slug') logger.debug(screen_slug) + areas = [] if screen_slug == "dashboard": areas = await get_dashboard(gerty) if screen_slug == "lnbits_wallets_balance": - areas = await get_lnbits_wallet_balances(gerty) + areas.append(await get_lnbits_wallet_balances(gerty)) elif screen_slug == "fun_satoshi_quotes": - areas = await get_satoshi_quotes() + areas.append(await get_satoshi_quotes()) elif screen_slug == "fun_pieter_wuille_facts": - areas = await get_pieter_wuille_fact() + areas.append(await get_pieter_wuille_fact()) elif screen_slug == "fun_exchange_market_rate": - areas = await get_exchange_rate(gerty) + areas.append(await get_exchange_rate(gerty)) elif screen_slug == "onchain_difficulty_epoch_progress": - areas = await get_onchain_stat(screen_slug, gerty) + areas.append(await get_onchain_stat(screen_slug, gerty)) elif screen_slug == "onchain_difficulty_retarget_date": - areas = await get_onchain_stat(screen_slug, gerty) + areas.append(await get_onchain_stat(screen_slug, gerty)) elif screen_slug == "onchain_difficulty_blocks_remaining": - areas = await get_onchain_stat(screen_slug, gerty) + areas.append(await get_onchain_stat(screen_slug, gerty)) elif screen_slug == "onchain_difficulty_epoch_time_remaining": - areas = await get_onchain_stat(screen_slug, gerty) + areas.append(await get_onchain_stat(screen_slug, gerty)) elif screen_slug == "mempool_recommended_fees": - areas = await get_placeholder_text() + areas.append(await get_placeholder_text()) elif screen_slug == "mempool_tx_count": - areas = await get_mempool_stat(screen_slug, gerty) + areas.append(await get_mempool_stat(screen_slug, gerty)) elif screen_slug == "mining_current_hash_rate": - areas = await get_placeholder_text() + areas.append(await get_placeholder_text()) elif screen_slug == "mining_current_difficulty": - areas = await get_placeholder_text() + 
areas.append(await get_placeholder_text()) elif screen_slug == "lightning_channel_count": - areas = await get_placeholder_text() + areas.append(await get_placeholder_text()) elif screen_slug == "lightning_node_count": - areas = await get_placeholder_text() + areas.append(await get_placeholder_text()) elif screen_slug == "lightning_tor_node_count": - areas = await get_placeholder_text() + areas.append(await get_placeholder_text()) elif screen_slug == "lightning_clearnet_nodes": - areas = await get_placeholder_text() + areas.append(await get_placeholder_text()) elif screen_slug == "lightning_unannounced_nodes": - areas = await get_placeholder_text() + areas.append(await get_placeholder_text()) elif screen_slug == "lightning_average_channel_capacity": - areas = await get_placeholder_text() + areas.append(await get_placeholder_text()) return areas # Get the dashboard screen async def get_dashboard(gerty): - screens = [] + areas = [] # XC rate text = [] amount = await satoshis_amount_as_fiat(100000000, gerty.exchange) - text.append(get_text_item_dict(format_number(amount), 40, 145, 161)) - text.append(get_text_item_dict("BTC{0} price".format(gerty.exchange), 15, 155, 199)) - screens.append(text) + text.append(get_text_item_dict(format_number(amount), 40)) + text.append(get_text_item_dict("BTC{0} price".format(gerty.exchange), 15)) + areas.append(text) # balance text = [] - text.append(get_text_item_dict("Alice's wallet balance", 15, 524, 50)) - text.append(get_text_item_dict("102,101", 40, 524, 126)) - text.append(get_text_item_dict("Bob's wallet balance", 15, 524, 211)) - text.append(get_text_item_dict("102", 40, 524, 286)) - screens.append(text) + text.append(get_text_item_dict("Alice's wallet balance", 15)) + text.append(get_text_item_dict("102,101", 40)) + text.append(get_text_item_dict("Bob's wallet balance", 15)) + text.append(get_text_item_dict("102", 40)) + areas.append(text) # Mempool fees text = [] - text.append(get_text_item_dict(format_number(await 
get_block_height(gerty)), 40, 115, 416)) - text.append(get_text_item_dict("Current block height", 15, 115, 456)) - screens.append(text) + text.append(get_text_item_dict(format_number(await get_block_height(gerty)), 40)) + text.append(get_text_item_dict("Current block height", 15)) + areas.append(text) # difficulty adjustment time text = [] - text.append(get_text_item_dict(await get_time_remaining_next_difficulty_adjustment(gerty), 15, 514, 390)) - text.append(get_text_item_dict("until next difficulty adjustment", 12, 514, 420)) - screens.append(text) + text.append(get_text_item_dict(await get_time_remaining_next_difficulty_adjustment(gerty), 15)) + text.append(get_text_item_dict("until next difficulty adjustment", 12)) + areas.append(text) - return screens + return areas async def get_lnbits_wallet_balances(gerty): From 9efe15a699f078a4ba62ccd789850ffc8aac3a8d Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 11:05:49 +0100 Subject: [PATCH 18/57] Adding mempool fees endpoint --- lnbits/extensions/gerty/views_api.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index ebe8d72db..39a0581f0 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -190,7 +190,7 @@ async def get_screen_text(screen_num: int, screens_list: dict, gerty): elif screen_slug == "onchain_difficulty_epoch_time_remaining": areas.append(await get_onchain_stat(screen_slug, gerty)) elif screen_slug == "mempool_recommended_fees": - areas.append(await get_placeholder_text()) + areas.append(await get_mempool_stat(screen_slug, gerty)) elif screen_slug == "mempool_tx_count": areas.append(await get_mempool_stat(screen_slug, gerty)) elif screen_slug == "mining_current_hash_rate": @@ -395,10 +395,13 @@ async def get_mempool_recommended_fees(gerty): if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: r = await 
client.get(gerty.mempool_endpoint + "/api/v1/fees/recommended") + logger.debug('fees') + logger.debug(r) return { - "high": r.fastestFee, - "medium": r.halfHourFee, - "low": r.economyFee, + # "high": r.fastestFee, + # "medium": r.halfHourFee, + # "low": r.hourFee, + # "none": r.economyFee, } @@ -414,6 +417,11 @@ async def get_mempool_stat(stat_slug: str, gerty): stat = round(r.json()['count']) text.append(get_text_item_dict("Transactions in the mempool", 15)) text.append(get_text_item_dict("{0}".format(format_number(stat)), 80)) + elif ( + stat_slug == "mempool_recommended_fees" + ): + fees = await get_mempool_recommended_fees(gerty) + logger.debug(fees) return text def get_date_suffix(dayNumber): From f828e4a64185795a426a19bbb8efe271b757d255 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 12:08:46 +0100 Subject: [PATCH 19/57] Laid out mempool.space fees page --- lnbits/core/views/api.py | 5 +-- lnbits/core/views/generic.py | 3 +- lnbits/core/views/public_api.py | 2 - lnbits/extensions/gerty/views_api.py | 44 +++++++++++++++---- tests/conftest.py | 4 +- tests/core/views/test_api.py | 6 --- tests/core/views/test_generic.py | 3 -- tests/core/views/test_public_api.py | 3 -- tests/extensions/bleskomat/conftest.py | 1 - tests/extensions/bleskomat/test_lnurl_api.py | 3 -- tests/extensions/boltz/conftest.py | 10 +---- tests/extensions/boltz/test_api.py | 3 +- tests/extensions/boltz/test_swap.py | 9 +--- tests/extensions/invoices/conftest.py | 1 - .../extensions/invoices/test_invoices_api.py | 8 ---- tests/mocks.py | 1 - 16 files changed, 42 insertions(+), 64 deletions(-) diff --git a/lnbits/core/views/api.py b/lnbits/core/views/api.py index 7a2bbbe60..c33e874cb 100644 --- a/lnbits/core/views/api.py +++ b/lnbits/core/views/api.py @@ -5,7 +5,7 @@ import json import time from http import HTTPStatus from io import BytesIO -from typing import Dict, List, Optional, Tuple, Union +from typing import Dict, Optional, Tuple, Union from urllib.parse import 
ParseResult, parse_qs, urlencode, urlparse, urlunparse import httpx @@ -17,7 +17,7 @@ from loguru import logger from pydantic import BaseModel from pydantic.fields import Field from sse_starlette.sse import EventSourceResponse -from starlette.responses import HTMLResponse, StreamingResponse +from starlette.responses import StreamingResponse from lnbits import bolt11, lnurl from lnbits.core.models import Payment, Wallet @@ -34,7 +34,6 @@ from lnbits.utils.exchange_rates import ( fiat_amount_as_satoshis, satoshis_amount_as_fiat, ) - from .. import core_app, db from ..crud import ( create_payment, diff --git a/lnbits/core/views/generic.py b/lnbits/core/views/generic.py index 31a7b0300..fcc0365bf 100644 --- a/lnbits/core/views/generic.py +++ b/lnbits/core/views/generic.py @@ -22,8 +22,6 @@ from lnbits.settings import ( LNBITS_SITE_TITLE, SERVICE_FEE, ) - -from ...helpers import get_valid_extensions from ..crud import ( create_account, create_wallet, @@ -34,6 +32,7 @@ from ..crud import ( update_user_extension, ) from ..services import pay_invoice, redeem_lnurl_withdraw +from ...helpers import get_valid_extensions core_html_routes: APIRouter = APIRouter(tags=["Core NON-API Website Routes"]) diff --git a/lnbits/core/views/public_api.py b/lnbits/core/views/public_api.py index 2d2cdd66c..465693d9a 100644 --- a/lnbits/core/views/public_api.py +++ b/lnbits/core/views/public_api.py @@ -6,10 +6,8 @@ from urllib.parse import urlparse from fastapi import HTTPException from loguru import logger from starlette.requests import Request -from starlette.responses import HTMLResponse from lnbits import bolt11 - from .. 
import core_app from ..crud import get_standalone_payment from ..tasks import api_invoice_listeners diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 39a0581f0..d14639ee7 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -395,14 +395,7 @@ async def get_mempool_recommended_fees(gerty): if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: r = await client.get(gerty.mempool_endpoint + "/api/v1/fees/recommended") - logger.debug('fees') - logger.debug(r) - return { - # "high": r.fastestFee, - # "medium": r.halfHourFee, - # "low": r.hourFee, - # "none": r.economyFee, - } + return r.json() async def get_mempool_stat(stat_slug: str, gerty): @@ -420,8 +413,41 @@ async def get_mempool_stat(stat_slug: str, gerty): elif ( stat_slug == "mempool_recommended_fees" ): + y_offset = 60 fees = await get_mempool_recommended_fees(gerty) - logger.debug(fees) + pos_y = 80 + y_offset + text.append(get_text_item_dict("mempool.space", 40, 160, pos_y)) + pos_y = 180 + y_offset + text.append(get_text_item_dict("Recommended Tx Fees", 20, 240, pos_y)) + + pos_y = 280 + y_offset + text.append(get_text_item_dict("{0}".format("No Priority"), 15, 30, pos_y)) + text.append(get_text_item_dict("{0}".format("Low Priority"), 15, 235, pos_y)) + text.append(get_text_item_dict("{0}".format("Medium Priority"), 15, 460, pos_y)) + text.append(get_text_item_dict("{0}".format("High Priority"), 15, 750, pos_y)) + + pos_y = 340 + y_offset + font_size = 15 + fee_append = "/vB" + fee_rate = fees["economyFee"] + text.append(get_text_item_dict( + "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, + 30, pos_y)) + + fee_rate = fees["hourFee"] + text.append(get_text_item_dict( + "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, + 235, pos_y)) + + fee_rate = fees["halfHourFee"] + 
text.append(get_text_item_dict( + "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, + 460, pos_y)) + + fee_rate = fees["fastestFee"] + text.append(get_text_item_dict( + "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, + 750, pos_y)) return text def get_date_suffix(dayNumber): diff --git a/tests/conftest.py b/tests/conftest.py index 1e719c76a..fd8d4d424 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,13 +1,11 @@ import asyncio -from typing import Tuple import pytest_asyncio from httpx import AsyncClient from lnbits.app import create_app from lnbits.commands import migrate_databases -from lnbits.core.crud import create_account, create_wallet, get_wallet -from lnbits.core.models import BalanceCheck, Payment, User, Wallet +from lnbits.core.crud import create_account, create_wallet from lnbits.core.views.api import CreateInvoiceData, api_payments_create_invoice from lnbits.db import Database from lnbits.settings import HOST, PORT diff --git a/tests/core/views/test_api.py b/tests/core/views/test_api.py index e0f6b5762..81468fd1f 100644 --- a/tests/core/views/test_api.py +++ b/tests/core/views/test_api.py @@ -1,18 +1,12 @@ import hashlib -from binascii import hexlify import pytest -import pytest_asyncio from lnbits import bolt11 -from lnbits.core.crud import get_wallet from lnbits.core.views.api import ( - CreateInvoiceData, api_payment, - api_payments_create_invoice, ) from lnbits.settings import wallet_class - from ...helpers import get_random_invoice_data, is_regtest diff --git a/tests/core/views/test_generic.py b/tests/core/views/test_generic.py index 4300b78b9..ac25e1e78 100644 --- a/tests/core/views/test_generic.py +++ b/tests/core/views/test_generic.py @@ -1,7 +1,4 @@ import pytest -import pytest_asyncio - -from tests.conftest import client @pytest.mark.asyncio diff --git a/tests/core/views/test_public_api.py 
b/tests/core/views/test_public_api.py index 6ebaeabd3..144cd161e 100644 --- a/tests/core/views/test_public_api.py +++ b/tests/core/views/test_public_api.py @@ -1,7 +1,4 @@ import pytest -import pytest_asyncio - -from lnbits.core.crud import get_wallet # check if the client is working diff --git a/tests/extensions/bleskomat/conftest.py b/tests/extensions/bleskomat/conftest.py index 13be2b579..595ba6b87 100644 --- a/tests/extensions/bleskomat/conftest.py +++ b/tests/extensions/bleskomat/conftest.py @@ -1,7 +1,6 @@ import json import secrets -import pytest import pytest_asyncio from lnbits.core.crud import create_account, create_wallet diff --git a/tests/extensions/bleskomat/test_lnurl_api.py b/tests/extensions/bleskomat/test_lnurl_api.py index 3f7232669..1fc4ea3ef 100644 --- a/tests/extensions/bleskomat/test_lnurl_api.py +++ b/tests/extensions/bleskomat/test_lnurl_api.py @@ -1,7 +1,6 @@ import secrets import pytest -import pytest_asyncio from lnbits.core.crud import get_wallet from lnbits.extensions.bleskomat.crud import get_bleskomat_lnurl @@ -10,8 +9,6 @@ from lnbits.extensions.bleskomat.helpers import ( query_to_signing_payload, ) from lnbits.settings import HOST, PORT -from tests.conftest import client -from tests.extensions.bleskomat.conftest import bleskomat, lnurl from tests.helpers import credit_wallet, is_regtest from tests.mocks import WALLET diff --git a/tests/extensions/boltz/conftest.py b/tests/extensions/boltz/conftest.py index b9ef78875..1bd1c638d 100644 --- a/tests/extensions/boltz/conftest.py +++ b/tests/extensions/boltz/conftest.py @@ -1,17 +1,9 @@ -import asyncio -import json -import secrets - -import pytest import pytest_asyncio -from lnbits.core.crud import create_account, create_wallet, get_wallet -from lnbits.extensions.boltz.boltz import create_reverse_swap, create_swap +from lnbits.extensions.boltz.boltz import create_reverse_swap from lnbits.extensions.boltz.models import ( CreateReverseSubmarineSwap, - CreateSubmarineSwap, ) -from 
tests.mocks import WALLET @pytest_asyncio.fixture(scope="session") diff --git a/tests/extensions/boltz/test_api.py b/tests/extensions/boltz/test_api.py index 90ce6ec16..2d64fc40c 100644 --- a/tests/extensions/boltz/test_api.py +++ b/tests/extensions/boltz/test_api.py @@ -1,7 +1,6 @@ import pytest -import pytest_asyncio -from tests.helpers import is_fake, is_regtest +from tests.helpers import is_fake @pytest.mark.asyncio diff --git a/tests/extensions/boltz/test_swap.py b/tests/extensions/boltz/test_swap.py index ab5954acb..f1a820cad 100644 --- a/tests/extensions/boltz/test_swap.py +++ b/tests/extensions/boltz/test_swap.py @@ -1,17 +1,10 @@ -import asyncio - import pytest -import pytest_asyncio -from lnbits.extensions.boltz.boltz import create_reverse_swap, create_swap from lnbits.extensions.boltz.crud import ( create_reverse_submarine_swap, - create_submarine_swap, get_reverse_submarine_swap, - get_submarine_swap, ) -from tests.extensions.boltz.conftest import reverse_swap -from tests.helpers import is_fake, is_regtest +from tests.helpers import is_fake @pytest.mark.asyncio diff --git a/tests/extensions/invoices/conftest.py b/tests/extensions/invoices/conftest.py index 09ac42ecb..277368d6b 100644 --- a/tests/extensions/invoices/conftest.py +++ b/tests/extensions/invoices/conftest.py @@ -1,4 +1,3 @@ -import pytest import pytest_asyncio from lnbits.core.crud import create_account, create_wallet diff --git a/tests/extensions/invoices/test_invoices_api.py b/tests/extensions/invoices/test_invoices_api.py index eaadd07b3..5661673e5 100644 --- a/tests/extensions/invoices/test_invoices_api.py +++ b/tests/extensions/invoices/test_invoices_api.py @@ -1,12 +1,4 @@ import pytest -import pytest_asyncio -from loguru import logger - -from lnbits.core.crud import get_wallet -from tests.conftest import adminkey_headers_from, client, invoice -from tests.extensions.invoices.conftest import accounting_invoice, invoices_wallet -from tests.helpers import credit_wallet -from tests.mocks 
import WALLET @pytest.mark.asyncio diff --git a/tests/mocks.py b/tests/mocks.py index 3fc0efae2..7e2df4f70 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -4,7 +4,6 @@ from lnbits import bolt11 from lnbits.settings import WALLET from lnbits.wallets.base import PaymentResponse, PaymentStatus, StatusResponse from lnbits.wallets.fake import FakeWallet - from .helpers import get_random_string, is_fake From 1280d3ba740fc27a47b5206e5cfe8fde0bb77d49 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 12:13:19 +0100 Subject: [PATCH 20/57] Started move of gerty api calls into helpers.py --- lnbits/extensions/gerty/__init__.py | 2 -- lnbits/extensions/gerty/helpers.py | 7 +++++++ lnbits/extensions/gerty/views_api.py | 8 ++------ 3 files changed, 9 insertions(+), 8 deletions(-) create mode 100644 lnbits/extensions/gerty/helpers.py diff --git a/lnbits/extensions/gerty/__init__.py b/lnbits/extensions/gerty/__init__.py index 6ec5f6b3c..c5f526b54 100644 --- a/lnbits/extensions/gerty/__init__.py +++ b/lnbits/extensions/gerty/__init__.py @@ -9,10 +9,8 @@ from lnbits.tasks import catch_everything_and_restart db = Database("ext_gerty") - gerty_ext: APIRouter = APIRouter(prefix="/gerty", tags=["Gerty"]) - def gerty_renderer(): return template_renderer(["lnbits/extensions/gerty/templates"]) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py new file mode 100644 index 000000000..80ea1bc19 --- /dev/null +++ b/lnbits/extensions/gerty/helpers.py @@ -0,0 +1,7 @@ +import httpx + +async def get_mempool_recommended_fees(gerty): + if isinstance(gerty.mempool_endpoint, str): + async with httpx.AsyncClient() as client: + r = await client.get(gerty.mempool_endpoint + "/api/v1/fees/recommended") + return r.json() \ No newline at end of file diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index d14639ee7..9838cecfb 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py 
@@ -20,6 +20,8 @@ from lnbits.core.views.api import api_payment, api_wallet from lnbits.decorators import WalletTypeInfo, get_key_type, require_admin_key from fastapi.templating import Jinja2Templates +from .helpers import * + from . import gerty_ext from .crud import create_gerty, update_gerty, delete_gerty, get_gerty, get_gertys from .models import Gerty @@ -391,12 +393,6 @@ async def get_block_height(gerty): return r.json() -async def get_mempool_recommended_fees(gerty): - if isinstance(gerty.mempool_endpoint, str): - async with httpx.AsyncClient() as client: - r = await client.get(gerty.mempool_endpoint + "/api/v1/fees/recommended") - return r.json() - async def get_mempool_stat(stat_slug: str, gerty): text = [] From d67efc320db979c4c18503e545bf2771e92ca6d1 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 12:37:11 +0100 Subject: [PATCH 21/57] Added hashrate endpoint --- lnbits/extensions/gerty/helpers.py | 68 +++++++++++++++++++++- lnbits/extensions/gerty/number_prefixer.py | 62 ++++++++++++++++++++ lnbits/extensions/gerty/views_api.py | 44 +------------- 3 files changed, 130 insertions(+), 44 deletions(-) create mode 100644 lnbits/extensions/gerty/number_prefixer.py diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index 80ea1bc19..661601a88 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -1,7 +1,73 @@ import httpx +import textwrap + +from .number_prefixer import * + + +# A helper function get a nicely formated dict for the text +def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int = None): + # Get line size by font size + line_width = 60 + if font_size <= 12: + line_width = 75 + elif font_size <= 15: + line_width = 58 + elif font_size <= 20: + line_width = 40 + elif font_size <= 40: + line_width = 30 + else: + line_width = 20 + + # wrap the text + wrapper = textwrap.TextWrapper(width=line_width) + word_list = wrapper.wrap(text=text) + # 
logger.debug("number of chars = {0}".format(len(text))) + + multilineText = '\n'.join(word_list) + # logger.debug("number of lines = {0}".format(len(word_list))) + + # logger.debug('multilineText') + # logger.debug(multilineText) + + text = { + "value": multilineText, + "size": font_size + } + if x_pos is None and y_pos is None: + text['position'] = 'center' + else: + text['x'] = x_pos + text['y'] = y_pos + return text + +# format a number for nice display output +def format_number(number): + return ("{:,}".format(round(number))) + async def get_mempool_recommended_fees(gerty): if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: r = await client.get(gerty.mempool_endpoint + "/api/v1/fees/recommended") - return r.json() \ No newline at end of file + return r.json() + +async def api_get_mining_stat(stat_slug: str, gerty): + stat = ""; + if isinstance(gerty.mempool_endpoint, str): + async with httpx.AsyncClient() as client: + if stat_slug == "mining_current_hash_rate": + r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/3d") + data = r.json() + stat = data['currentHashrate'] + return stat + + +async def get_mining_stat(stat_slug: str, gerty): + text = [] + if stat_slug == "mining_current_hash_rate": + stat = await api_get_mining_stat(stat_slug, gerty) + stat = "{0}hash".format(si_format(stat, 6, True, " ")) + text.append(get_text_item_dict("Current Hashrate", 20)) + text.append(get_text_item_dict(stat, 40)) + return text \ No newline at end of file diff --git a/lnbits/extensions/gerty/number_prefixer.py b/lnbits/extensions/gerty/number_prefixer.py new file mode 100644 index 000000000..1ba8c024b --- /dev/null +++ b/lnbits/extensions/gerty/number_prefixer.py @@ -0,0 +1,62 @@ +import math + +def si_classifier(val): + suffixes = { + 24:{'long_suffix':'yotta', 'short_suffix':'Y', 'scalar':10**24}, + 21:{'long_suffix':'zetta', 'short_suffix':'Z', 'scalar':10**21}, + 18:{'long_suffix':'exa', 'short_suffix':'E', 
'scalar':10**18}, + 15:{'long_suffix':'peta', 'short_suffix':'P', 'scalar':10**15}, + 12:{'long_suffix':'tera', 'short_suffix':'T', 'scalar':10**12}, + 9:{'long_suffix':'giga', 'short_suffix':'G', 'scalar':10**9}, + 6:{'long_suffix':'mega', 'short_suffix':'M', 'scalar':10**6}, + 3:{'long_suffix':'kilo', 'short_suffix':'k', 'scalar':10**3}, + 0:{'long_suffix':'', 'short_suffix':'', 'scalar':10**0}, + -3:{'long_suffix':'milli', 'short_suffix':'m', 'scalar':10**-3}, + -6:{'long_suffix':'micro', 'short_suffix':'µ', 'scalar':10**-6}, + -9:{'long_suffix':'nano', 'short_suffix':'n', 'scalar':10**-9}, + -12:{'long_suffix':'pico', 'short_suffix':'p', 'scalar':10**-12}, + -15:{'long_suffix':'femto', 'short_suffix':'f', 'scalar':10**-15}, + -18:{'long_suffix':'atto', 'short_suffix':'a', 'scalar':10**-18}, + -21:{'long_suffix':'zepto', 'short_suffix':'z', 'scalar':10**-21}, + -24:{'long_suffix':'yocto', 'short_suffix':'y', 'scalar':10**-24} + } + exponent = int(math.floor(math.log10(abs(val))/3.0)*3) + return suffixes.get(exponent, None) + +def si_formatter(value): + ''' + Return a triple of scaled value, short suffix, long suffix, or None if + the value cannot be classified. 
+ ''' + classifier = si_classifier(value) + if classifier == None: + # Don't know how to classify this value + return None + + scaled = value / classifier['scalar'] + return (scaled, classifier['short_suffix'], classifier['long_suffix']) + +def si_format(value, precision=4, long_form=False, separator=''): + ''' + "SI prefix" formatted string: return a string with the given precision + and an appropriate order-of-3-magnitudes suffix, e.g.: + si_format(1001.0) => '1.00K' + si_format(0.00000000123, long_form=True, separator=' ') => '1.230 nano' + ''' + scaled, short_suffix, long_suffix = si_formatter(value) + + if scaled == None: + # Don't know how to format this value + return value + + suffix = long_suffix if long_form else short_suffix + + if abs(scaled) < 10: + precision = precision - 1 + elif abs(scaled) < 100: + precision = precision - 2 + else: + precision = precision - 3 + + return '{scaled:.{precision}f}{separator}{suffix}'.format( + scaled=scaled, precision=precision, separator=separator, suffix=suffix) \ No newline at end of file diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 9838cecfb..84f9b8f3c 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -1,7 +1,6 @@ import math from http import HTTPStatus import json -import textwrap import httpx import random import os @@ -196,7 +195,7 @@ async def get_screen_text(screen_num: int, screens_list: dict, gerty): elif screen_slug == "mempool_tx_count": areas.append(await get_mempool_stat(screen_slug, gerty)) elif screen_slug == "mining_current_hash_rate": - areas.append(await get_placeholder_text()) + areas.append(await get_mining_stat(screen_slug, gerty)) elif screen_slug == "mining_current_difficulty": areas.append(await get_placeholder_text()) elif screen_slug == "lightning_channel_count": @@ -309,42 +308,7 @@ async def get_exchange_rate(gerty): return text -# A helper function get a nicely formated dict for the text -def 
get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int = None): - # Get line size by font size - line_width = 60 - if font_size <= 12: - line_width = 75 - elif font_size <= 15: - line_width = 58 - elif font_size <= 20: - line_width = 40 - elif font_size <= 40: - line_width = 30 - else: - line_width = 20 - # wrap the text - wrapper = textwrap.TextWrapper(width=line_width) - word_list = wrapper.wrap(text=text) - # logger.debug("number of chars = {0}".format(len(text))) - - multilineText = '\n'.join(word_list) - # logger.debug("number of lines = {0}".format(len(word_list))) - - # logger.debug('multilineText') - # logger.debug(multilineText) - - text = { - "value": multilineText, - "size": font_size - } - if x_pos is None and y_pos is None: - text['position'] = 'center' - else: - text['x'] = x_pos - text['y'] = y_pos - return text async def get_onchain_stat(stat_slug: str, gerty): @@ -393,7 +357,6 @@ async def get_block_height(gerty): return r.json() - async def get_mempool_stat(stat_slug: str, gerty): text = [] if isinstance(gerty.mempool_endpoint, str): @@ -452,11 +415,6 @@ def get_date_suffix(dayNumber): else: return ["st", "nd", "rd"][dayNumber % 10 - 1] -# format a number for nice display output -def format_number(number): - return ("{:,}".format(round(number))) - - def get_time_remaining(seconds, granularity=2): intervals = ( From c5b1f2870f1f3efc3d8c690fe612b9131db26998 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 13:14:31 +0100 Subject: [PATCH 22/57] added mining difficulty --- lnbits/extensions/gerty/helpers.py | 32 +++++++++++++++++++++++----- lnbits/extensions/gerty/views_api.py | 2 +- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index 661601a88..3adb872eb 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -1,5 +1,6 @@ import httpx import textwrap +from loguru import logger from 
.number_prefixer import * @@ -57,9 +58,17 @@ async def api_get_mining_stat(stat_slug: str, gerty): if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: if stat_slug == "mining_current_hash_rate": - r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/3d") + r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/1m") data = r.json() - stat = data['currentHashrate'] + stat = {} + stat['current'] = data['currentHashrate'] + stat['1w'] = data['hashrates'][len(data['hashrates']) - 7]['avgHashrate'] + elif stat_slug == "mining_current_difficulty": + r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/1m") + data = r.json() + stat = {} + stat['current'] = data['currentDifficulty'] + stat['previous'] = data['difficulty'][len(data['difficulty']) - 2]['difficulty'] return stat @@ -67,7 +76,20 @@ async def get_mining_stat(stat_slug: str, gerty): text = [] if stat_slug == "mining_current_hash_rate": stat = await api_get_mining_stat(stat_slug, gerty) - stat = "{0}hash".format(si_format(stat, 6, True, " ")) - text.append(get_text_item_dict("Current Hashrate", 20)) - text.append(get_text_item_dict(stat, 40)) + logger.debug(stat) + current = "{0}hash".format(si_format(stat['current'], 6, True, " ")) + text.append(get_text_item_dict("Current Mining Hashrate", 20)) + text.append(get_text_item_dict(current, 40)) + # compare vs previous time period + difference = (stat['current'] - stat['1w']) / stat['current'] * 100 + text.append(get_text_item_dict("{0}{1}% in last 7 days".format("+" if difference > 0 else "", round(difference, 4)), 12)) + elif stat_slug == "mining_current_difficulty": + stat = await api_get_mining_stat(stat_slug, gerty) + text.append(get_text_item_dict("Current Mining Difficulty", 20)) + text.append(get_text_item_dict(format_number(stat['current']), 40)) + difference = (stat['current'] - stat['previous']) / stat['current'] * 100 + text.append( + get_text_item_dict("{0}{1}% since 
last adjustment".format("+" if difference > 0 else "", round(difference, 4)), + 15)) + text.append(get_text_item_dict("Required threshold for mining proof-of-work", 12)) return text \ No newline at end of file diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 84f9b8f3c..118b939df 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -197,7 +197,7 @@ async def get_screen_text(screen_num: int, screens_list: dict, gerty): elif screen_slug == "mining_current_hash_rate": areas.append(await get_mining_stat(screen_slug, gerty)) elif screen_slug == "mining_current_difficulty": - areas.append(await get_placeholder_text()) + areas.append(await get_mining_stat(screen_slug, gerty)) elif screen_slug == "lightning_channel_count": areas.append(await get_placeholder_text()) elif screen_slug == "lightning_node_count": From 6ecf27e168dae0768e9a3c4d4180fabb95af685d Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 15:02:45 +0100 Subject: [PATCH 23/57] Api tweaks --- lnbits/extensions/gerty/helpers.py | 2 +- lnbits/extensions/gerty/views_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index 3adb872eb..d2048814f 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -91,5 +91,5 @@ async def get_mining_stat(stat_slug: str, gerty): text.append( get_text_item_dict("{0}{1}% since last adjustment".format("+" if difference > 0 else "", round(difference, 4)), 15)) - text.append(get_text_item_dict("Required threshold for mining proof-of-work", 12)) + # text.append(get_text_item_dict("Required threshold for mining proof-of-work", 12)) return text \ No newline at end of file diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 118b939df..0be8531e7 100644 --- a/lnbits/extensions/gerty/views_api.py +++ 
b/lnbits/extensions/gerty/views_api.py @@ -289,7 +289,7 @@ async def get_pieter_wuille_fact(): quote = await api_gerty_wuille() if quote: text.append(get_text_item_dict(quote, 15)) - text.append(get_text_item_dict("Pieter Wuille facts", 15)) + # text.append(get_text_item_dict("Pieter Wuille facts", 15)) return text From 662e637758370127c03b0da80ab430bdecc74353 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 16:12:48 +0100 Subject: [PATCH 24/57] Added lightning network dashboard --- lnbits/extensions/gerty/helpers.py | 66 ++++++++++++++++--- .../gerty/templates/gerty/index.html | 43 +----------- lnbits/extensions/gerty/views_api.py | 36 ++++------ 3 files changed, 74 insertions(+), 71 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index d2048814f..cae05ff4f 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -4,6 +4,9 @@ from loguru import logger from .number_prefixer import * +def get_percent_difference(current, previous, precision=4): + difference = (current - previous) / current * 100 + return "{0}{1}%".format("+" if difference > 0 else "", round(difference, precision)) # A helper function get a nicely formated dict for the text def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int = None): @@ -43,8 +46,8 @@ def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int return text # format a number for nice display output -def format_number(number): - return ("{:,}".format(round(number))) +def format_number(number, precision=None): + return ("{:,}".format(round(number, precision))) async def get_mempool_recommended_fees(gerty): @@ -71,6 +74,51 @@ async def api_get_mining_stat(stat_slug: str, gerty): stat['previous'] = data['difficulty'][len(data['difficulty']) - 2]['difficulty'] return stat +async def api_get_lightning_stats(gerty): + stat = {} + if isinstance(gerty.mempool_endpoint, str): + async with 
httpx.AsyncClient() as client: + r = await client.get(gerty.mempool_endpoint + "/api/v1/lightning/statistics/latest") + data = r.json() + return data + +async def get_lightning_stats(gerty): + data = await api_get_lightning_stats(gerty) + areas = [] + + logger.debug(data['latest']['channel_count']) + + text = [] + text.append(get_text_item_dict("Channel Count", 12)) + text.append(get_text_item_dict(format_number(data['latest']['channel_count']), 20)) + difference = get_percent_difference(current=data['latest']['channel_count'], + previous=data['previous']['channel_count']) + text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) + areas.append(text) + + text = [] + text.append(get_text_item_dict("Number of Nodes", 12)) + text.append(get_text_item_dict(format_number(data['latest']['node_count']), 20)) + difference = get_percent_difference(current=data['latest']['node_count'], previous=data['previous']['node_count']) + text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) + areas.append(text) + + text = [] + text.append(get_text_item_dict("Total Capacity", 12)) + avg_capacity = float(data['latest']['total_capacity']) / float(100000000) + text.append(get_text_item_dict("{0} BTC".format(format_number(avg_capacity, 2)), 20)) + difference = get_percent_difference(current=data['latest']['total_capacity'], previous=data['previous']['total_capacity']) + text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) + areas.append(text) + + text = [] + text.append(get_text_item_dict("Average Channel Capacity", 12)) + text.append(get_text_item_dict("{0} sats".format(format_number(data['latest']['avg_capacity'])), 20)) + difference = get_percent_difference(current=data['latest']['avg_capacity'], previous=data['previous']['avg_capacity']) + text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) + areas.append(text) + + return areas async def get_mining_stat(stat_slug: str, gerty): text = [] @@ 
-81,15 +129,15 @@ async def get_mining_stat(stat_slug: str, gerty): text.append(get_text_item_dict("Current Mining Hashrate", 20)) text.append(get_text_item_dict(current, 40)) # compare vs previous time period - difference = (stat['current'] - stat['1w']) / stat['current'] * 100 - text.append(get_text_item_dict("{0}{1}% in last 7 days".format("+" if difference > 0 else "", round(difference, 4)), 12)) + difference = get_percent_difference(current=stat['current'], previous=stat['1w']) + text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) elif stat_slug == "mining_current_difficulty": stat = await api_get_mining_stat(stat_slug, gerty) text.append(get_text_item_dict("Current Mining Difficulty", 20)) text.append(get_text_item_dict(format_number(stat['current']), 40)) - difference = (stat['current'] - stat['previous']) / stat['current'] * 100 - text.append( - get_text_item_dict("{0}{1}% since last adjustment".format("+" if difference > 0 else "", round(difference, 4)), - 15)) + difference = get_percent_difference(current=stat['current'], previous=stat['previous']) + text.append(get_text_item_dict("{0} since last adjustment".format(difference), 12)) # text.append(get_text_item_dict("Required threshold for mining proof-of-work", 12)) - return text \ No newline at end of file + return text + + diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index d341ce987..5975ef52f 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -305,40 +305,8 @@ label="Lightning Network" > - Toggle all - -
- - - - - - - - - - -
@@ -614,12 +582,7 @@ mempool_tx_count: true, mining_current_hash_rate: true, mining_current_difficulty: true, - lightning_channel_count: true, - lightning_node_count: true, - lightning_tor_node_count: true, - lightning_clearnet_nodes: true, - lightning_unannounced_nodes: true, - lightning_average_channel_capacity: true, + lightning_dashboard: true }, lnbits_wallets: [], mempool_endpoint: "https://mempool.space", diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 0be8531e7..cc79cb655 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -133,16 +133,10 @@ async def api_gerty_json( enabled_screens.append(screen_slug) logger.debug("Screeens " + str(enabled_screens)) - text = await get_screen_text(p, enabled_screens, gerty) + data = await get_screen_data(p, enabled_screens, gerty) next_screen_number = 0 if ((p + 1) >= enabled_screen_count) else p + 1; - # ln = [] - # if gerty.ln_stats and isinstance(gerty.mempool_endpoint, str): - # async with httpx.AsyncClient() as client: - # r = await client.get(gerty.mempool_endpoint + "/api/v1/lightning/statistics/latest") - # if r: - # ln.append(r.json()) return { "settings": { @@ -155,7 +149,8 @@ async def api_gerty_json( "screen": { "slug": get_screen_slug_by_index(p, enabled_screens), "group": get_screen_slug_by_index(p, enabled_screens), - "areas": text + "title": data['title'], + "areas": data['areas'] } } @@ -166,12 +161,14 @@ def get_screen_slug_by_index(index: int, screens_list): # Get a list of text items for the screen number -async def get_screen_text(screen_num: int, screens_list: dict, gerty): +async def get_screen_data(screen_num: int, screens_list: dict, gerty): screen_slug = get_screen_slug_by_index(screen_num, screens_list) # first get the relevant slug from the display_preferences logger.debug('screen_slug') logger.debug(screen_slug) areas = [] + title = "" + if screen_slug == "dashboard": areas = await get_dashboard(gerty) if 
screen_slug == "lnbits_wallets_balance": @@ -198,20 +195,15 @@ async def get_screen_text(screen_num: int, screens_list: dict, gerty): areas.append(await get_mining_stat(screen_slug, gerty)) elif screen_slug == "mining_current_difficulty": areas.append(await get_mining_stat(screen_slug, gerty)) - elif screen_slug == "lightning_channel_count": - areas.append(await get_placeholder_text()) - elif screen_slug == "lightning_node_count": - areas.append(await get_placeholder_text()) - elif screen_slug == "lightning_tor_node_count": - areas.append(await get_placeholder_text()) - elif screen_slug == "lightning_clearnet_nodes": - areas.append(await get_placeholder_text()) - elif screen_slug == "lightning_unannounced_nodes": - areas.append(await get_placeholder_text()) - elif screen_slug == "lightning_average_channel_capacity": - areas.append(await get_placeholder_text()) + elif screen_slug == "lightning_dashboard": + title = "Lightning Network" + areas = await get_lightning_stats(gerty) - return areas + data = {} + data['title'] = title + data['areas'] = areas + + return data # Get the dashboard screen async def get_dashboard(gerty): From d3e6bb344442b67aec4b999642aa88197ec032e0 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 17:06:22 +0100 Subject: [PATCH 25/57] loads more work on API formatting --- .../gerty/templates/gerty/index.html | 4 +-- lnbits/extensions/gerty/views_api.py | 34 ++++++++++--------- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index 5975ef52f..01b38f60a 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -667,8 +667,8 @@ data ) .then(function (response) { - self.gertys.push(mapGerty(response.data)) self.formDialog.show = false + self.gertys.push(mapGerty(response.data)) }) .catch(function (error) { LNbits.utils.notifyApiError(error) @@ -689,8 
+689,8 @@ self.gertys = _.reject(self.gertys, function (obj) { return obj.id == data.id }) - self.gertys.push(mapGerty(response.data)) self.formDialog.show = false + self.gertys.push(mapGerty(response.data)) }) .catch(function (error) { LNbits.utils.notifyApiError(error) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index cc79cb655..9c6b8d33b 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -172,7 +172,12 @@ async def get_screen_data(screen_num: int, screens_list: dict, gerty): if screen_slug == "dashboard": areas = await get_dashboard(gerty) if screen_slug == "lnbits_wallets_balance": - areas.append(await get_lnbits_wallet_balances(gerty)) + wallets = await get_lnbits_wallet_balances(gerty) + text = [] + for wallet in wallets: + text.append(get_text_item_dict("{0}'s Wallet".format(wallet['name']), 20)) + text.append(get_text_item_dict("{0} sats".format(format_number(wallet['balance'])), 40)) + areas.append(text) elif screen_slug == "fun_satoshi_quotes": areas.append(await get_satoshi_quotes()) elif screen_slug == "fun_pieter_wuille_facts": @@ -216,10 +221,11 @@ async def get_dashboard(gerty): areas.append(text) # balance text = [] - text.append(get_text_item_dict("Alice's wallet balance", 15)) - text.append(get_text_item_dict("102,101", 40)) - text.append(get_text_item_dict("Bob's wallet balance", 15)) - text.append(get_text_item_dict("102", 40)) + wallets = await get_lnbits_wallet_balances(gerty) + text = [] + for wallet in wallets: + text.append(get_text_item_dict("{0}'s Wallet".format(wallet['name']), 15)) + text.append(get_text_item_dict("{0} sats".format(format_number(wallet['balance'])), 40)) areas.append(text) # Mempool fees @@ -240,21 +246,17 @@ async def get_dashboard(gerty): async def get_lnbits_wallet_balances(gerty): # Get Wallet info wallets = [] - text = [] if gerty.lnbits_wallets != "": for lnbits_wallet in json.loads(gerty.lnbits_wallets): - wallet = await 
get_wallet_for_key(key=lnbits_wallet) - logger.debug(wallet) + logger.debug(wallet.name) if wallet: - # wallets.append({ - # "name": wallet.name, - # "balance": wallet.balance_msat, - # "inkey": wallet.inkey, - # }) - text.append(get_text_item_dict(wallet.name, 20)) - text.append(get_text_item_dict(format_number(wallet.balance_msat), 40)) - return text + wallets.append({ + "name": wallet.name, + "balance": wallet.balance_msat, + "inkey": wallet.inkey, + }) + return wallets async def get_placeholder_text(): From 9308d1b68984c091b476ac62a3c4995056dec5ad Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 17:48:49 +0100 Subject: [PATCH 26/57] Add dashboard title --- lnbits/extensions/gerty/views_api.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 9c6b8d33b..326f5cadc 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -170,6 +170,7 @@ async def get_screen_data(screen_num: int, screens_list: dict, gerty): title = "" if screen_slug == "dashboard": + title = gerty.name areas = await get_dashboard(gerty) if screen_slug == "lnbits_wallets_balance": wallets = await get_lnbits_wallet_balances(gerty) From 4a40f4edc7a2b1d97a68541fbe92ca857f208367 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 21:36:24 +0100 Subject: [PATCH 27/57] Added local next update time using pytz --- lnbits/extensions/gerty/helpers.py | 9 ++- lnbits/extensions/gerty/views_api.py | 2 +- poetry.lock | 116 +++++++++++++++++---------- pyproject.toml | 1 + 4 files changed, 84 insertions(+), 44 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index cae05ff4f..f4ea7ede8 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -1,3 +1,5 @@ +import datetime +import pytz import httpx import textwrap from loguru import logger @@ -140,4 +142,9 @@ async def 
get_mining_stat(stat_slug: str, gerty): # text.append(get_text_item_dict("Required threshold for mining proof-of-work", 12)) return text - +def get_next_update_time(sleep_time_seconds: int = 0, timezone: str = "Europe/London"): + utc_now = pytz.utc.localize(datetime.datetime.utcnow()) + next_refresh_time = utc_now + datetime.timedelta(0, sleep_time_seconds) + local_refresh_time = next_refresh_time.astimezone(pytz.timezone(timezone)) + # datetime.fromtimestamp(time.time()).strftime("%e %b %Y at %H:%M") + return "Next update at {0}".format(local_refresh_time.strftime("%H:%M on%e %b %Y")) \ No newline at end of file diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 326f5cadc..304abba30 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -141,7 +141,7 @@ async def api_gerty_json( return { "settings": { "refreshTime": gerty.refresh_time, - "requestTimestamp": datetime.fromtimestamp(time.time()).strftime("%e %b %Y at %H:%M"), + "requestTimestamp": get_next_update_time(gerty.refresh_time), "nextScreenNumber": next_screen_number, "showTextBoundRect": False, "name": gerty.name diff --git a/poetry.lock b/poetry.lock index 38f08575b..4cc38fa22 100644 --- a/poetry.lock +++ b/poetry.lock @@ -20,8 +20,8 @@ sniffio = ">=1.1" typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] trio = ["trio (>=0.16)"] [[package]] @@ -36,7 +36,7 @@ python-versions = 
">=3.6" typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "atomicwrites" @@ -55,10 +55,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] +dev = ["coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] [[package]] name = "bech32" @@ -107,6 +107,9 @@ category = "main" optional = false python-versions = ">=2.7" +[package.dependencies] +setuptools = "*" + [[package]] name = "certifi" version = "2021.5.30" @@ -215,10 +218,10 @@ marshmallow = ">=3.0.0" python-dotenv = "*" [package.extras] -dev = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==3.9.2)", "flake8-bugbear (==21.4.3)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "tox"] +dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==3.9.2)", "flake8-bugbear 
(==21.4.3)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] django = ["dj-database-url", "dj-email-url", "django-cache-url"] lint = ["flake8 (==3.9.2)", "flake8-bugbear (==21.4.3)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] -tests = ["pytest", "dj-database-url", "dj-email-url", "django-cache-url"] +tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] [[package]] name = "fastapi" @@ -233,10 +236,10 @@ pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1. starlette = "0.19.1" [package.extras] -all = ["requests (>=2.24.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "itsdangerous (>=1.1.0,<3.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "orjson (>=3.2.1,<4.0.0)", "email_validator (>=1.1.1,<2.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] -dev = ["python-jose[cryptography] (>=3.3.0,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "typer (>=0.4.1,<0.5.0)", "pyyaml (>=5.3.1,<7.0.0)"] -test = ["pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "mypy (==0.910)", "flake8 (>=3.8.3,<4.0.0)", "black (==22.3.0)", "isort (>=5.0.6,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "email_validator (>=1.1.1,<2.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "peewee (>=3.13.3,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "flask (>=1.1.2,<3.0.0)", "anyio[trio] (>=3.2.1,<4.0.0)", "types-ujson (==4.2.1)", "types-orjson (==3.6.2)", "types-dataclasses (==0.6.5)"] +all = ["email_validator 
(>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.5.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.3.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.6.5)", "types-orjson (==3.6.2)", "types-ujson (==4.2.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] [[package]] name = "h11" @@ -290,8 +293,8 @@ rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" [package.extras] -brotli = ["brotlicffi", "brotli"] -cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (>=1.0.0,<2.0.0)"] @@ -316,9 +319,9 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] 
+docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-perf (>=0.9.2)"] [[package]] name = "iniconfig" @@ -337,10 +340,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jinja2" @@ -382,7 +385,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["codecov (>=2.0.15)", "colorama (>=0.3.4)", "flake8 (>=3.7.7)", "tox (>=3.9.0)", "tox-travis (>=0.12)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "Sphinx (>=2.2.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "black (>=19.10b0)", "isort (>=5.1.1)"] +dev = ["Sphinx (>=2.2.1)", "black (>=19.10b0)", "codecov (>=2.0.15)", "colorama (>=0.3.4)", "flake8 (>=3.7.7)", "isort (>=5.1.1)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "tox (>=3.9.0)", "tox-travis (>=0.12)"] [[package]] name = "markupsafe" @@ -404,9 +407,9 @@ python-versions = ">=3.7" packaging = ">=17.0" [package.extras] -dev = ["pytest", "pytz", "simplejson", "mypy (==0.961)", "flake8 (==4.0.1)", 
"flake8-bugbear (==22.6.22)", "pre-commit (>=2.4,<3.0)", "tox"] -docs = ["sphinx (==4.5.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.8)"] -lint = ["mypy (==0.961)", "flake8 (==4.0.1)", "flake8-bugbear (==22.6.22)", "pre-commit (>=2.4,<3.0)"] +dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.6.22)", "mypy (==0.961)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.8)", "sphinx (==4.5.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.6.22)", "mypy (==0.961)", "pre-commit (>=2.4,<3.0)"] tests = ["pytest", "pytz", "simplejson"] [[package]] @@ -418,7 +421,7 @@ optional = false python-versions = ">=3.6" [package.extras] -build = ["twine", "wheel", "blurb"] +build = ["blurb", "twine", "wheel"] docs = ["sphinx"] test = ["pytest (<5.4)", "pytest-cov"] @@ -499,8 +502,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" @@ -514,8 +517,8 @@ python-versions = ">=3.6" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] -testing = ["pytest-benchmark", "pytest"] -dev = ["tox", "pre-commit"] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "psycopg2-binary" @@ -573,7 +576,7 @@ optional = false python-versions = ">=3.6.8" [package.extras] -diagrams = ["railroad-diagrams", "jinja2"] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pypng" @@ -642,7 
+645,7 @@ pytest = ">=6.1.0" typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} [package.extras] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] [[package]] name = "pytest-cov" @@ -657,7 +660,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["virtualenv", "pytest-xdist", "six", "process-tests", "hunter", "fields"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "python-dotenv" @@ -670,6 +673,14 @@ python-versions = ">=3.5" [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pytz" +version = "2022.4" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "pyyaml" version = "5.4.1" @@ -690,7 +701,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" six = ">=1.8.0" [package.extras] -test = ["ipython", "pytest (>=3.0.5)", "mock"] +test = ["ipython", "mock", "pytest (>=3.0.5)"] [[package]] name = "rfc3986" @@ -717,6 +728,19 @@ python-versions = "*" [package.dependencies] cffi = ">=1.3.0" +[[package]] +name = "setuptools" +version = "65.4.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", 
"jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "shortuuid" version = "1.0.1" @@ -754,12 +778,12 @@ mssql = ["pyodbc"] mssql_pymssql = ["pymssql"] mssql_pyodbc = ["pyodbc"] mysql = ["mysqlclient"] -oracle = ["cx-oracle"] +oracle = ["cx_oracle"] postgresql = ["psycopg2"] postgresql_pg8000 = ["pg8000 (<1.16.6)"] postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql (<1)", "pymysql"] +pymysql = ["pymysql", "pymysql (<1)"] [[package]] name = "sqlalchemy-aio" @@ -848,7 +872,7 @@ h11 = ">=0.8" typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] -standard = ["websockets (>=10.0)", "httptools (>=0.4.0)", "watchfiles (>=0.13)", "python-dotenv (>=0.13)", "PyYAML (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "colorama (>=0.4)"] +standard = ["PyYAML (>=5.1)", "colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] [[package]] name = "uvloop" @@ -859,9 +883,9 @@ optional = false python-versions = ">=3.7" [package.extras] -dev = ["Cython (>=0.29.24,<0.30.0)", "pytest (>=3.6.0)", "Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,<2.8.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme 
(>=0.5.2,<0.6.0)"] -test = ["aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,<2.8.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"] +dev = ["Cython (>=0.29.24,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=19.0.0,<19.1.0)", "pycodestyle (>=2.7.0,<2.8.0)", "pytest (>=3.6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=19.0.0,<19.1.0)", "pycodestyle (>=2.7.0,<2.8.0)"] [[package]] name = "watchgod" @@ -912,13 +936,13 @@ optional = false python-versions = ">=3.6" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] [metadata] lock-version = "1.1" -python-versions = "^3.9 | ^3.8 | ^3.7" -content-hash = "ac8c4117d537aaf8853d35038f2821ea4bc04b29c7971b91fd46329365008b95" +python-versions = "^3.10 | ^3.9 | ^3.8 | ^3.7" +content-hash = "401fa2739c9209df26cb1b2defaf90c5a4fcdafacc8eb2627f8d324857870281" [metadata.files] aiofiles = [ @@ -1454,6 +1478,10 @@ python-dotenv = [ {file = "python-dotenv-0.19.0.tar.gz", hash = "sha256:f521bc2ac9a8e03c736f62911605c5d83970021e3fa95b37d769e2bbbe9b6172"}, {file = "python_dotenv-0.19.0-py2.py3-none-any.whl", hash = "sha256:aae25dc1ebe97c420f50b81fb0e5c949659af713f31fdb63c749ca68748f34b1"}, ] +pytz = [ + {file = 
"pytz-2022.4-py2.py3-none-any.whl", hash = "sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91"}, + {file = "pytz-2022.4.tar.gz", hash = "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174"}, +] pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, @@ -1518,6 +1546,10 @@ secp256k1 = [ {file = "secp256k1-0.14.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c9e7c024ff17e9b9d7c392bb2a917da231d6cb40ab119389ff1f51dca10339a4"}, {file = "secp256k1-0.14.0.tar.gz", hash = "sha256:82c06712d69ef945220c8b53c1a0d424c2ff6a1f64aee609030df79ad8383397"}, ] +setuptools = [ + {file = "setuptools-65.4.1-py3-none-any.whl", hash = "sha256:1b6bdc6161661409c5f21508763dc63ab20a9ac2f8ba20029aaaa7fdb9118012"}, + {file = "setuptools-65.4.1.tar.gz", hash = "sha256:3050e338e5871e70c72983072fe34f6032ae1cdeeeb67338199c2f74e083a80e"}, +] shortuuid = [ {file = "shortuuid-1.0.1-py3-none-any.whl", hash = "sha256:492c7402ff91beb1342a5898bd61ea953985bf24a41cd9f247409aa2e03c8f77"}, {file = "shortuuid-1.0.1.tar.gz", hash = "sha256:3c11d2007b915c43bee3e10625f068d8a349e04f0d81f08f5fa08507427ebf1f"}, diff --git a/pyproject.toml b/pyproject.toml index e95c6a2ee..0484ca9e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,7 @@ zipp = "3.5.0" loguru = "0.5.3" cffi = "1.15.0" websocket-client = "1.3.3" +pytz = "^2022.4" [tool.poetry.dev-dependencies] isort = "^5.10.1" From 24b87bed9f00400f74496c6a6f11bdfe76a367d9 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 21:49:47 +0100 Subject: [PATCH 28/57] Added pytz to requirements.txt --- poetry.lock | 214 ++++++++++++++++++++++------------------------- requirements.txt | 1 + 2 files changed, 102 
insertions(+), 113 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4cc38fa22..8e9541fcc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,14 +38,6 @@ typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "attrs" version = "21.2.0" @@ -78,7 +70,7 @@ python-versions = "*" [[package]] name = "black" -version = "22.6.0" +version = "22.8.0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -100,7 +92,7 @@ jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] -name = "cerberus" +name = "Cerberus" version = "1.3.4" description = "Lightweight, extensible schema and data validation tool for Python dictionaries." category = "main" @@ -162,7 +154,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.4.4" +version = "6.5.0" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -346,7 +338,7 @@ plugins = ["setuptools"] requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] -name = "jinja2" +name = "Jinja2" version = "3.0.1" description = "A very fast and expressive template engine." 
category = "main" @@ -388,7 +380,7 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} dev = ["Sphinx (>=2.2.1)", "black (>=19.10b0)", "codecov (>=2.0.15)", "colorama (>=0.3.4)", "flake8 (>=3.7.7)", "isort (>=5.1.1)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "tox (>=3.9.0)", "tox-travis (>=0.12)"] [[package]] -name = "markupsafe" +name = "MarkupSafe" version = "2.0.1" description = "Safely add untrusted strings to HTML/XML markup." category = "main" @@ -487,11 +479,11 @@ six = "*" [[package]] name = "pathspec" -version = "0.9.0" +version = "0.10.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" [[package]] name = "platformdirs" @@ -587,7 +579,7 @@ optional = false python-versions = "*" [[package]] -name = "pyqrcode" +name = "PyQRCode" version = "1.2.1" description = "A QR code generator written purely in Python with SVG, EPS, PNG and terminal output." 
category = "main" @@ -598,7 +590,7 @@ python-versions = "*" PNG = ["pypng (>=0.0.13)"] [[package]] -name = "pyscss" +name = "pyScss" version = "1.4.0" description = "pyScss, a Scss compiler for Python" category = "main" @@ -612,14 +604,13 @@ six = "*" [[package]] name = "pytest" -version = "7.1.2" +version = "7.1.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -682,7 +673,7 @@ optional = false python-versions = "*" [[package]] -name = "pyyaml" +name = "PyYAML" version = "5.4.1" description = "YAML parser and emitter for Python" category = "main" @@ -690,7 +681,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] -name = "represent" +name = "Represent" version = "1.6.0.post0" description = "Create __repr__ automatically or declaratively." 
category = "main" @@ -766,7 +757,7 @@ optional = false python-versions = ">=3.5" [[package]] -name = "sqlalchemy" +name = "SQLAlchemy" version = "1.3.23" description = "Database Abstraction Library" category = "main" @@ -844,7 +835,7 @@ python-versions = ">=3.6" [[package]] name = "types-protobuf" -version = "3.19.22" +version = "3.20.4" description = "Typing stubs for protobuf" category = "dev" optional = false @@ -957,9 +948,6 @@ asgiref = [ {file = "asgiref-3.4.1-py3-none-any.whl", hash = "sha256:ffc141aa908e6f175673e7b1b3b7af4fdb0ecb738fc5c8b88f69f055c2415214"}, {file = "asgiref-3.4.1.tar.gz", hash = "sha256:4ef1ab46b484e3c706329cedeff284a5d40824200638503f5768edb6de7d58e9"}, ] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] attrs = [ {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, @@ -974,31 +962,31 @@ bitstring = [ {file = "bitstring-3.1.9.tar.gz", hash = "sha256:a5848a3f63111785224dca8bb4c0a75b62ecdef56a042c8d6be74b16f7e860e7"}, ] black = [ - {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"}, - {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"}, - {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"}, - {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"}, - {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"}, - {file = 
"black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"}, - {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"}, - {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"}, - {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"}, - {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"}, - {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"}, - {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"}, - {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"}, - {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"}, - {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"}, - {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"}, - {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"}, - {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"}, - {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"}, - {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"}, - {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"}, - {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, - {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, + {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, + {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, + {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, + {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, + {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, + {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, + {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, + {file = 
"black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, + {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, + {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, + {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, + {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, + {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, + {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, + {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, + {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, + {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, ] 
-cerberus = [ +Cerberus = [ {file = "Cerberus-1.3.4.tar.gz", hash = "sha256:d1b21b3954b2498d9a79edf16b3170a3ac1021df88d197dc2ce5928ba519237c"}, ] certifi = [ @@ -1070,56 +1058,56 @@ colorama = [ {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] coverage = [ - {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, - {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, - {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, - {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, - {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, - {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, - {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, - {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, - {file = 
"coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, - {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, - {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, - {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, - {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, - {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, - {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, - {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, - {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, - {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, - {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, - {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, - {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = 
"coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] ecdsa = [ {file = "ecdsa-0.17.0-py2.py3-none-any.whl", hash = "sha256:5cf31d5b33743abe0dfc28999036c849a69d548f994b535e527ee3cb7f3ef676"}, @@ -1205,7 +1193,7 @@ isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] -jinja2 = [ +Jinja2 = [ {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, ] @@ -1217,7 +1205,7 @@ loguru = [ {file = "loguru-0.5.3-py3-none-any.whl", hash = "sha256:f8087ac396b5ee5f67c963b495d615ebbceac2796379599820e324419d53667c"}, {file = "loguru-0.5.3.tar.gz", hash = "sha256:b28e72ac7a98be3d28ad28570299a393dfcd32e5e3f6a353dec94675767b6319"}, ] -markupsafe = [ +MarkupSafe = [ {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, @@ -1338,8 +1326,8 @@ pathlib2 = [ {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"}, ] pathspec = [ - {file = 
"pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, + {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, + {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, ] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, @@ -1455,16 +1443,16 @@ pyparsing = [ pypng = [ {file = "pypng-0.0.21-py3-none-any.whl", hash = "sha256:76f8a1539ec56451da7ab7121f12a361969fe0f2d48d703d198ce2a99d6c5afd"}, ] -pyqrcode = [ +PyQRCode = [ {file = "PyQRCode-1.2.1.tar.gz", hash = "sha256:fdbf7634733e56b72e27f9bce46e4550b75a3a2c420414035cae9d9d26b234d5"}, {file = "PyQRCode-1.2.1.zip", hash = "sha256:1b2812775fa6ff5c527977c4cd2ccb07051ca7d0bc0aecf937a43864abe5eff6"}, ] -pyscss = [ +pyScss = [ {file = "pyScss-1.4.0.tar.gz", hash = "sha256:8f35521ffe36afa8b34c7d6f3195088a7057c185c2b8f15ee459ab19748669ff"}, ] pytest = [ - {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, - {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, + {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, + {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, ] pytest-asyncio = [ {file = "pytest-asyncio-0.19.0.tar.gz", hash = "sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, @@ -1482,7 +1470,7 @@ pytz = [ {file = "pytz-2022.4-py2.py3-none-any.whl", hash = 
"sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91"}, {file = "pytz-2022.4.tar.gz", hash = "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174"}, ] -pyyaml = [ +PyYAML = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, @@ -1513,7 +1501,7 @@ pyyaml = [ {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] -represent = [ +Represent = [ {file = "Represent-1.6.0.post0-py2.py3-none-any.whl", hash = "sha256:99142650756ef1998ce0661568f54a47dac8c638fb27e3816c02536575dbba8c"}, {file = "Represent-1.6.0.post0.tar.gz", hash = "sha256:026c0de2ee8385d1255b9c2426cd4f03fe9177ac94c09979bc601946c8493aa0"}, ] @@ -1562,7 +1550,7 @@ sniffio = [ {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, ] -sqlalchemy = [ +SQLAlchemy = [ {file = "SQLAlchemy-1.3.23-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:fd3b96f8c705af8e938eaa99cbd8fd1450f632d38cad55e7367c33b263bf98ec"}, {file = "SQLAlchemy-1.3.23-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:29cccc9606750fe10c5d0e8bd847f17a97f3850b8682aef1f56f5d5e1a5a64b1"}, {file = "SQLAlchemy-1.3.23-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:927ce09e49bff3104459e1451ce82983b0a3062437a07d883a4c66f0b344c9b5"}, @@ -1644,8 +1632,8 @@ typed-ast = [ {file = 
"typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] types-protobuf = [ - {file = "types-protobuf-3.19.22.tar.gz", hash = "sha256:d2b26861b0cb46a3c8669b0df507b7ef72e487da66d61f9f3576aa76ce028a83"}, - {file = "types_protobuf-3.19.22-py3-none-any.whl", hash = "sha256:d291388678af91bb045fafa864f142dc4ac22f5d4cdca097c7d8d8a32fa9b3ab"}, + {file = "types-protobuf-3.20.4.tar.gz", hash = "sha256:0dad3a5009895c985a56e2837f61902bad9594151265ac0ee907bb16d0b01eb7"}, + {file = "types_protobuf-3.20.4-py3-none-any.whl", hash = "sha256:5082437afe64ce3b31c8db109eae86e02fda11e4d5f9ac59cb8578a8a138aa70"}, ] typing-extensions = [ {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, diff --git a/requirements.txt b/requirements.txt index 697ea1d4d..fd213cebf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -33,6 +33,7 @@ pyparsing==3.0.9 pypng==0.20220715.0 pyqrcode==1.2.1 pyscss==1.4.0 +pytz==2022.4 python-dotenv==0.20.0 pyyaml==6.0 represent==1.6.0.post0 From 40c921c7f525e04e181702bc5b3f59ce0e39bcba Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 6 Oct 2022 22:01:40 +0100 Subject: [PATCH 29/57] Bug fix wallet balance --- lnbits/extensions/gerty/views_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 304abba30..88303dce9 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -254,7 +254,7 @@ async def get_lnbits_wallet_balances(gerty): if wallet: wallets.append({ "name": wallet.name, - "balance": wallet.balance_msat, + "balance": wallet.balance_msat / 1000, "inkey": wallet.inkey, }) return wallets From b5c992afd9f5483fc20aa5086fa97c073e6d944f Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Fri, 7 Oct 2022 10:25:54 +0100 Subject: [PATCH 30/57] Added overnight sleep for Gerty ---
lnbits/extensions/gerty/helpers.py | 15 +++++++++++++-- lnbits/extensions/gerty/views_api.py | 9 +++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index f4ea7ede8..ddf9dd197 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -146,5 +146,16 @@ def get_next_update_time(sleep_time_seconds: int = 0, timezone: str = "Europe/Lo utc_now = pytz.utc.localize(datetime.datetime.utcnow()) next_refresh_time = utc_now + datetime.timedelta(0, sleep_time_seconds) local_refresh_time = next_refresh_time.astimezone(pytz.timezone(timezone)) - # datetime.fromtimestamp(time.time()).strftime("%e %b %Y at %H:%M") - return "Next update at {0}".format(local_refresh_time.strftime("%H:%M on%e %b %Y")) \ No newline at end of file + return "{0} {1}".format("I'll wake up at" if gerty_should_sleep() else "Next update at",local_refresh_time.strftime("%H:%M on%e %b %Y")) + +def gerty_should_sleep(timezone: str = "Europe/London"): + utc_now = pytz.utc.localize(datetime.datetime.utcnow()) + local_time = utc_now.astimezone(pytz.timezone(timezone)) + hours = local_time.strftime("%H") + hours = int(hours) + logger.debug("HOURS") + logger.debug(hours) + if(hours >= 22 and hours <= 23): + return True + else: + return False diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 88303dce9..3daca8bd6 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -137,11 +137,16 @@ async def api_gerty_json( next_screen_number = 0 if ((p + 1) >= enabled_screen_count) else p + 1; + # get the sleep time + sleep_time = gerty.refresh_time + if gerty_should_sleep(): + sleep_time_hours = 7 + sleep_time = 60 * sleep_time_hours return { "settings": { - "refreshTime": gerty.refresh_time, - "requestTimestamp": get_next_update_time(gerty.refresh_time), + "refreshTime": sleep_time, + "requestTimestamp": 
get_next_update_time(sleep_time), "nextScreenNumber": next_screen_number, "showTextBoundRect": False, "name": gerty.name From 95e303d9ba9f1f35de644620bf5cac3a5db33670 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Fri, 7 Oct 2022 22:12:48 +0100 Subject: [PATCH 31/57] bug fix for sleep hours --- lnbits/extensions/gerty/views_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 3daca8bd6..c0c643de3 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -140,8 +140,8 @@ async def api_gerty_json( # get the sleep time sleep_time = gerty.refresh_time if gerty_should_sleep(): - sleep_time_hours = 7 - sleep_time = 60 * sleep_time_hours + sleep_time_hours = 8 + sleep_time = 60 * 60 * sleep_time_hours return { "settings": { From 0b63db46db06876a4edd787181bd4ba4f0d496ef Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Sat, 8 Oct 2022 18:08:58 +0100 Subject: [PATCH 32/57] Tweak wallet balance text --- lnbits/extensions/gerty/views_api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index c0c643de3..b7cc3e74c 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -230,8 +230,8 @@ async def get_dashboard(gerty): wallets = await get_lnbits_wallet_balances(gerty) text = [] for wallet in wallets: - text.append(get_text_item_dict("{0}'s Wallet".format(wallet['name']), 15)) - text.append(get_text_item_dict("{0} sats".format(format_number(wallet['balance'])), 40)) + text.append(get_text_item_dict("{0}".format(wallet['name']), 15)) + text.append(get_text_item_dict("{0} sats".format(format_number(wallet['balance'])), 20)) areas.append(text) # Mempool fees From aa040f3a6c88972d508372b053ef917a1f3e50f8 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Mon, 10 Oct 2022 13:38:21 +0100 Subject: [PATCH 
33/57] Bug fixes --- lnbits/extensions/gerty/helpers.py | 10 +++++----- lnbits/extensions/gerty/views_api.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index ddf9dd197..eb307d60c 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -1,4 +1,4 @@ -import datetime +from datetime import datetime, timedelta import pytz import httpx import textwrap @@ -143,13 +143,13 @@ async def get_mining_stat(stat_slug: str, gerty): return text def get_next_update_time(sleep_time_seconds: int = 0, timezone: str = "Europe/London"): - utc_now = pytz.utc.localize(datetime.datetime.utcnow()) - next_refresh_time = utc_now + datetime.timedelta(0, sleep_time_seconds) + utc_now = pytz.utc.localize(datetime.utcnow()) + next_refresh_time = utc_now + timedelta(0, sleep_time_seconds) local_refresh_time = next_refresh_time.astimezone(pytz.timezone(timezone)) - return "{0} {1}".format("I'll wake up at" if gerty_should_sleep() else "Next update at",local_refresh_time.strftime("%H:%M on%e %b %Y")) + return "{0} {1}".format("I'll wake up at" if gerty_should_sleep() else "Next update at",local_refresh_time.strftime("%H:%M on %e %b %Y")) def gerty_should_sleep(timezone: str = "Europe/London"): - utc_now = pytz.utc.localize(datetime.datetime.utcnow()) + utc_now = pytz.utc.localize(datetime.utcnow()) local_time = utc_now.astimezone(pytz.timezone(timezone)) hours = local_time.strftime("%H") hours = int(hours) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index b7cc3e74c..4849840a5 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -138,7 +138,7 @@ async def api_gerty_json( next_screen_number = 0 if ((p + 1) >= enabled_screen_count) else p + 1; # get the sleep time - sleep_time = gerty.refresh_time + sleep_time = gerty.refresh_time if gerty.refresh_time else 300 if gerty_should_sleep(): 
sleep_time_hours = 8 sleep_time = 60 * 60 * sleep_time_hours From 79e98c2f6cd9d1d1fd712db6b6fde989320d8e85 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 14:59:41 +0100 Subject: [PATCH 34/57] Removed pieter wuille quotes --- .../gerty/static/pieter_wuille.json | 24 ------------------- .../gerty/templates/gerty/index.html | 20 ---------------- lnbits/extensions/gerty/views_api.py | 18 -------------- 3 files changed, 62 deletions(-) delete mode 100644 lnbits/extensions/gerty/static/pieter_wuille.json diff --git a/lnbits/extensions/gerty/static/pieter_wuille.json b/lnbits/extensions/gerty/static/pieter_wuille.json deleted file mode 100644 index 9dec9f679..000000000 --- a/lnbits/extensions/gerty/static/pieter_wuille.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "facts": [ - "When a woman asked Pieter Wuille to talk dirty to her, he described the OpenSSL DER implementation.", - "Pieter Wuille recently visited an event horizon and escaped with a cryptographic proof.", - "Pieter Wuille's PhD thesis defence in full: \"Pieter Wuille, thank you\".", - "Pieter Wuille is an acronym for Programmatic Intelligent Encrypted Telemetric Encapsulated Recursive Witness Upscaling Integrated Load-Balancing Logical Entity.", - "Dan Bernstein only trusts one source of random numbers: Pieter Wuille.", - "Putting Pieter Wuille in the title of an r/Bitcoin submission gets more upvotes than the same post from Pieter Wuille himself.", - "Pieter Wuille won the underhanded crypto contest but his entry was so underhanded nobody even knows he entered.", - "Greg Maxwell is a bot created by Pieter Wuille to argue on reddit so he can get code done.", - "Pieter Wuille doesn't need the public key to calculate the corresponding private key.", - "When the Wikipedia servers corrupted all data including backups, Pieter Wuille had to stay awake all night to retype it.", - "It is a Bitcoin consensus rule that when Pieter's hard drive is full no more blocks can be added.", - "When they go out, 
Pieter Wuille pays for his parents.", - "Pieter Wuille replaced the existing monetary system by writing a few thousand lines of code.", - "Putting Pieter Wuille in the title of an r/Bitcoin submission gets more upvotes than the same post from Pieter Wuille himself.", - "Only Pieter Wuille can name things harder to pronounce than Pieter Wuille.", - "Pieter Wuille doesn't write code, he wills it into existence.", - "If every copy of the blockchain were deleted Pieter Wuille would recreate it from memory.", - "If all else fails, bitcoin should be restarted by syncing the code and the blockchain directly from Wuille's mind.", - "Pieter Wuille codes // Enlightened Zen master floats // Haikus trickle down.", - "Pieter Wuille once wrote a constant time generator for generating constant time cryptographic code." - ] -} \ No newline at end of file diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index 01b38f60a..0681f2173 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -191,35 +191,16 @@ > - - - Toggle all - -
Displays random quotes from Satoshi - - Show accurate facts about Pieter Wuille - -
Date: Thu, 20 Oct 2022 15:00:49 +0100 Subject: [PATCH 35/57] Removed LNbits wallets balance screen from options --- lnbits/extensions/gerty/templates/gerty/index.html | 12 ------------ lnbits/extensions/gerty/views_api.py | 7 ------- 2 files changed, 19 deletions(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index 0681f2173..bfbd029fb 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -180,17 +180,6 @@ label="Show the dashboard" > - - - - Date: Thu, 20 Oct 2022 15:09:09 +0100 Subject: [PATCH 36/57] Merged all onchain items into single dashboard --- .../gerty/templates/gerty/index.html | 72 +++++--------- lnbits/extensions/gerty/views_api.py | 93 ++++++++++--------- 2 files changed, 68 insertions(+), 97 deletions(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index bfbd029fb..e066eca74 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -176,52 +176,25 @@

Use the toggles below to control what your Gerty will display

+ v-model="formDialog.data.display_preferences.dashboard" + label="Show the dashboard" + >
- - Displays random quotes from Satoshi - - - - - Toggle all - -
- + Displays random quotes from Satoshi + + - - - - - -
+ = enabled_screen_count) else p + 1; # get the sleep time - sleep_time = gerty.refresh_time if gerty.refresh_time else 300 + sleep_time = gerty.refresh_time if gerty.refresh_time else 300 if gerty_should_sleep(): sleep_time_hours = 8 sleep_time = 60 * 60 * sleep_time_hours @@ -174,14 +175,8 @@ async def get_screen_data(screen_num: int, screens_list: dict, gerty): areas.append(await get_satoshi_quotes()) elif screen_slug == "fun_exchange_market_rate": areas.append(await get_exchange_rate(gerty)) - elif screen_slug == "onchain_difficulty_epoch_progress": - areas.append(await get_onchain_stat(screen_slug, gerty)) - elif screen_slug == "onchain_difficulty_retarget_date": - areas.append(await get_onchain_stat(screen_slug, gerty)) - elif screen_slug == "onchain_difficulty_blocks_remaining": - areas.append(await get_onchain_stat(screen_slug, gerty)) - elif screen_slug == "onchain_difficulty_epoch_time_remaining": - areas.append(await get_onchain_stat(screen_slug, gerty)) + elif screen_slug == "onchain_dashboard": + areas.append(await get_onchain_dashboard(gerty)) elif screen_slug == "mempool_recommended_fees": areas.append(await get_mempool_stat(screen_slug, gerty)) elif screen_slug == "mempool_tx_count": @@ -200,6 +195,7 @@ async def get_screen_data(screen_num: int, screens_list: dict, gerty): return data + # Get the dashboard screen async def get_dashboard(gerty): areas = [] @@ -283,39 +279,37 @@ async def get_exchange_rate(gerty): return text - - - -async def get_onchain_stat(stat_slug: str, gerty): - text = [] +async def get_onchain_dashboard(gerty): + areas = [] if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: - if ( - stat_slug == "onchain_difficulty_epoch_progress" or - stat_slug == "onchain_difficulty_retarget_date" or - stat_slug == "onchain_difficulty_blocks_remaining" or - stat_slug == "onchain_difficulty_epoch_time_remaining" - ): - r = await client.get(gerty.mempool_endpoint + "/api/v1/difficulty-adjustment") - if 
stat_slug == "onchain_difficulty_epoch_progress": - stat = round(r.json()['progressPercent']) - text.append(get_text_item_dict("Progress through current difficulty epoch", 15)) - text.append(get_text_item_dict("{0}%".format(stat), 80)) - elif stat_slug == "onchain_difficulty_retarget_date": - stat = r.json()['estimatedRetargetDate'] - dt = datetime.fromtimestamp(stat / 1000).strftime("%e %b %Y at %H:%M") - text.append(get_text_item_dict("Estimated date of next difficulty adjustment", 15)) - text.append(get_text_item_dict(dt, 40)) - elif stat_slug == "onchain_difficulty_blocks_remaining": - stat = r.json()['remainingBlocks'] - text.append(get_text_item_dict("Blocks remaining until next difficulty adjustment", 15)) - text.append(get_text_item_dict("{0}".format(format_number(stat)), 80)) - elif stat_slug == "onchain_difficulty_epoch_time_remaining": - stat = r.json()['remainingTime'] - text.append(get_text_item_dict("Blocks remaining until next difficulty adjustment", 15)) - text.append(get_text_item_dict(get_time_remaining(stat / 1000, 4), 20)) - return text + r = await client.get(gerty.mempool_endpoint + "/api/v1/difficulty-adjustment") + text = [] + stat = round(r.json()['progressPercent']) + text.append(get_text_item_dict("Progress through current difficulty epoch", 12)) + text.append(get_text_item_dict("{0}%".format(stat), 20)) + areas.append(text) + text = [] + stat = r.json()['estimatedRetargetDate'] + dt = datetime.fromtimestamp(stat / 1000).strftime("%e %b %Y at %H:%M") + text.append(get_text_item_dict("Estimated date of next difficulty adjustment", 12)) + text.append(get_text_item_dict(dt, 20)) + areas.append(text) + + text = [] + stat = r.json()['remainingBlocks'] + text.append(get_text_item_dict("Blocks remaining until next difficulty adjustment", 12)) + text.append(get_text_item_dict("{0}".format(format_number(stat)), 20)) + areas.append(text) + + text = [] + stat = r.json()['remainingTime'] + text.append(get_text_item_dict("Blocks remaining until next 
difficulty adjustment", 12)) + text.append(get_text_item_dict(get_time_remaining(stat / 1000, 4), 20)) + areas.append(text) + + return areas async def get_time_remaining_next_difficulty_adjustment(gerty): if isinstance(gerty.mempool_endpoint, str): @@ -325,6 +319,7 @@ async def get_time_remaining_next_difficulty_adjustment(gerty): time = get_time_remaining(stat / 1000, 3) return time + async def get_block_height(gerty): if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: @@ -332,6 +327,7 @@ async def get_block_height(gerty): return r.json() + async def get_mempool_stat(stat_slug: str, gerty): text = [] if isinstance(gerty.mempool_endpoint, str): @@ -345,7 +341,7 @@ async def get_mempool_stat(stat_slug: str, gerty): text.append(get_text_item_dict("Transactions in the mempool", 15)) text.append(get_text_item_dict("{0}".format(format_number(stat)), 80)) elif ( - stat_slug == "mempool_recommended_fees" + stat_slug == "mempool_recommended_fees" ): y_offset = 60 fees = await get_mempool_recommended_fees(gerty) @@ -365,33 +361,38 @@ async def get_mempool_stat(stat_slug: str, gerty): fee_append = "/vB" fee_rate = fees["economyFee"] text.append(get_text_item_dict( - "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, + "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), + font_size, 30, pos_y)) fee_rate = fees["hourFee"] text.append(get_text_item_dict( - "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, + "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), + font_size, 235, pos_y)) fee_rate = fees["halfHourFee"] text.append(get_text_item_dict( - "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, + "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), 
fee_append), + font_size, 460, pos_y)) fee_rate = fees["fastestFee"] text.append(get_text_item_dict( - "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, + "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), + font_size, 750, pos_y)) return text + def get_date_suffix(dayNumber): if 4 <= dayNumber <= 20 or 24 <= dayNumber <= 30: return "th" else: return ["st", "nd", "rd"][dayNumber % 10 - 1] -def get_time_remaining(seconds, granularity=2): +def get_time_remaining(seconds, granularity=2): intervals = ( # ('weeks', 604800), # 60 * 60 * 24 * 7 ('days', 86400), # 60 * 60 * 24 @@ -409,4 +410,4 @@ def get_time_remaining(seconds, granularity=2): if value == 1: name = name.rstrip('s') result.append("{} {}".format(round(value), name)) - return ', '.join(result[:granularity]) \ No newline at end of file + return ', '.join(result[:granularity]) From 004cd059731b3e62f937e8c1a1737d0b7d523933 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 15:10:08 +0100 Subject: [PATCH 37/57] Mempool fees now standalone item --- .../gerty/templates/gerty/index.html | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index e066eca74..8b78a071d 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -195,29 +195,12 @@ v-model="formDialog.data.display_preferences.onchain_dashboard" label="Onchain dashboard" > - - - Toggle all - -
- -
Date: Thu, 20 Oct 2022 15:13:16 +0100 Subject: [PATCH 38/57] Moved lightninig network dashboard out of expansion group --- .../gerty/templates/gerty/index.html | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index 8b78a071d..abdefbc7b 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -196,10 +196,10 @@ label="Onchain dashboard" > - + - - - + + +
Date: Thu, 20 Oct 2022 15:14:22 +0100 Subject: [PATCH 39/57] Removed mempool_tx_count from api --- lnbits/extensions/gerty/views_api.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index dfd110518..c8c4efb7c 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -179,8 +179,6 @@ async def get_screen_data(screen_num: int, screens_list: dict, gerty): areas.append(await get_onchain_dashboard(gerty)) elif screen_slug == "mempool_recommended_fees": areas.append(await get_mempool_stat(screen_slug, gerty)) - elif screen_slug == "mempool_tx_count": - areas.append(await get_mempool_stat(screen_slug, gerty)) elif screen_slug == "mining_current_hash_rate": areas.append(await get_mining_stat(screen_slug, gerty)) elif screen_slug == "mining_current_difficulty": From 9f3b4c48ea2e3a4126c1aed4a9d0f182227140ae Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 15:59:55 +0100 Subject: [PATCH 40/57] Created mining dashboard --- lnbits/extensions/gerty/helpers.py | 112 ++++++++++++------ .../gerty/templates/gerty/index.html | 33 +----- lnbits/extensions/gerty/views_api.py | 40 +------ 3 files changed, 87 insertions(+), 98 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index eb307d60c..c29139fbd 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -58,23 +58,54 @@ async def get_mempool_recommended_fees(gerty): r = await client.get(gerty.mempool_endpoint + "/api/v1/fees/recommended") return r.json() -async def api_get_mining_stat(stat_slug: str, gerty): - stat = ""; +async def get_mining_dashboard(gerty): + areas = [] if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: - if stat_slug == "mining_current_hash_rate": - r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/1m") - data = r.json() - stat = {} - 
stat['current'] = data['currentHashrate'] - stat['1w'] = data['hashrates'][len(data['hashrates']) - 7]['avgHashrate'] - elif stat_slug == "mining_current_difficulty": - r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/1m") - data = r.json() - stat = {} - stat['current'] = data['currentDifficulty'] - stat['previous'] = data['difficulty'][len(data['difficulty']) - 2]['difficulty'] - return stat + # current hashrate + r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/1w") + data = r.json() + hashrateNow = data['currentHashrate'] + hashrateOneWeekAgo = data['hashrates'][6]['avgHashrate'] + + text = [] + text.append(get_text_item_dict("Current mining hashrate", 12)) + text.append(get_text_item_dict("{0}hash".format(si_format(hashrateNow, 6, True, " ")), 20)) + text.append(get_text_item_dict("{0} vs 7 days ago".format(get_percent_difference(hashrateNow, hashrateOneWeekAgo, 3)), 12)) + areas.append(text) + + r = await client.get(gerty.mempool_endpoint + "/api/v1/difficulty-adjustment") + + # timeAvg + text = [] + time_avg = r.json()['timeAvg'] / 1000 + hours, remainder = divmod(time_avg, 3600) + minutes, seconds = divmod(remainder, 60) + time_avg = '{:02} minutes {:02} seconds'.format(int(minutes), int(seconds)) + text.append(get_text_item_dict("Current block time", 12)) + text.append(get_text_item_dict(str(time_avg), 20)) + areas.append(text) + + # difficulty adjustment + text = [] + stat = r.json()['remainingTime'] + text.append(get_text_item_dict("Time to next difficulty adjustment", 12)) + text.append(get_text_item_dict(get_time_remaining(stat / 1000, 3), 20)) + areas.append(text) + + # difficultyChange + text = [] + difficultyChange = round(r.json()['difficultyChange'], 2) + text.append(get_text_item_dict("Estimated difficulty change", 12)) + text.append(get_text_item_dict("{0}{1}%".format("+" if difficultyChange > 0 else "", round(difficultyChange, 2)), 20)) + areas.append(text) + + r = await 
client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/1m") + data = r.json() + stat = {} + stat['current'] = data['currentDifficulty'] + stat['previous'] = data['difficulty'][len(data['difficulty']) - 2]['difficulty'] + return areas async def api_get_lightning_stats(gerty): stat = {} @@ -88,8 +119,6 @@ async def get_lightning_stats(gerty): data = await api_get_lightning_stats(gerty) areas = [] - logger.debug(data['latest']['channel_count']) - text = [] text.append(get_text_item_dict("Channel Count", 12)) text.append(get_text_item_dict(format_number(data['latest']['channel_count']), 20)) @@ -122,26 +151,6 @@ async def get_lightning_stats(gerty): return areas -async def get_mining_stat(stat_slug: str, gerty): - text = [] - if stat_slug == "mining_current_hash_rate": - stat = await api_get_mining_stat(stat_slug, gerty) - logger.debug(stat) - current = "{0}hash".format(si_format(stat['current'], 6, True, " ")) - text.append(get_text_item_dict("Current Mining Hashrate", 20)) - text.append(get_text_item_dict(current, 40)) - # compare vs previous time period - difference = get_percent_difference(current=stat['current'], previous=stat['1w']) - text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) - elif stat_slug == "mining_current_difficulty": - stat = await api_get_mining_stat(stat_slug, gerty) - text.append(get_text_item_dict("Current Mining Difficulty", 20)) - text.append(get_text_item_dict(format_number(stat['current']), 40)) - difference = get_percent_difference(current=stat['current'], previous=stat['previous']) - text.append(get_text_item_dict("{0} since last adjustment".format(difference), 12)) - # text.append(get_text_item_dict("Required threshold for mining proof-of-work", 12)) - return text - def get_next_update_time(sleep_time_seconds: int = 0, timezone: str = "Europe/London"): utc_now = pytz.utc.localize(datetime.utcnow()) next_refresh_time = utc_now + timedelta(0, sleep_time_seconds) @@ -159,3 +168,32 @@ def 
gerty_should_sleep(timezone: str = "Europe/London"): return True else: return False + + + +def get_date_suffix(dayNumber): + if 4 <= dayNumber <= 20 or 24 <= dayNumber <= 30: + return "th" + else: + return ["st", "nd", "rd"][dayNumber % 10 - 1] + + +def get_time_remaining(seconds, granularity=2): + intervals = ( + # ('weeks', 604800), # 60 * 60 * 24 * 7 + ('days', 86400), # 60 * 60 * 24 + ('hours', 3600), # 60 * 60 + ('minutes', 60), + ('seconds', 1), + ) + + result = [] + + for name, count in intervals: + value = seconds // count + if value: + seconds -= value * count + if value == 1: + name = name.rstrip('s') + result.append("{} {}".format(round(value), name)) + return ', '.join(result[:granularity]) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index abdefbc7b..d6e779cfa 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -201,36 +201,16 @@ label="mempool.space recommended fees" > - - - - Toggle all - -
- - - -
- + - +
Date: Thu, 20 Oct 2022 16:02:21 +0100 Subject: [PATCH 41/57] Replaced current block itme with progress through current epoch --- lnbits/extensions/gerty/helpers.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index c29139fbd..3f7f88e3e 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -78,12 +78,9 @@ async def get_mining_dashboard(gerty): # timeAvg text = [] - time_avg = r.json()['timeAvg'] / 1000 - hours, remainder = divmod(time_avg, 3600) - minutes, seconds = divmod(remainder, 60) - time_avg = '{:02} minutes {:02} seconds'.format(int(minutes), int(seconds)) - text.append(get_text_item_dict("Current block time", 12)) - text.append(get_text_item_dict(str(time_avg), 20)) + progress = "{0}%".format(round(r.json()['progressPercent'], 2)) + text.append(get_text_item_dict("Progress through current epoch", 12)) + text.append(get_text_item_dict(progress, 20)) areas.append(text) # difficulty adjustment From dc02601db1d05cc140374c45b7d69840748fcaba Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 16:03:28 +0100 Subject: [PATCH 42/57] Fixed casing on Gerty extension modal UI --- lnbits/extensions/gerty/templates/gerty/index.html | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index d6e779cfa..7007c8cdc 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -177,7 +177,7 @@ Date: Thu, 20 Oct 2022 16:05:15 +0100 Subject: [PATCH 43/57] Removed unused toggleAll logic from gerty modal --- .../gerty/templates/gerty/index.html | 32 ++----------------- 1 file changed, 2 insertions(+), 30 deletions(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index 
7007c8cdc..7dc39fed5 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -218,11 +218,12 @@ :disable="formDialog.data.wallet == null || formDialog.data.name == null" type="submit" class="q-mr-md" + v-if="!formDialog.data.id" >Create Gerty From 1c56622d89e358e5817858f9beee335c76455403 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 16:14:45 +0100 Subject: [PATCH 44/57] formatted --- lnbits/extensions/gerty/views_api.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index ec2612d39..4151e7a00 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -309,6 +309,7 @@ async def get_onchain_dashboard(gerty): return areas + async def get_time_remaining_next_difficulty_adjustment(gerty): if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: @@ -380,4 +381,4 @@ async def get_mempool_stat(stat_slug: str, gerty): "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), font_size, 750, pos_y)) - return text \ No newline at end of file + return text From 07042a7d4ff0f6602e10c6d8703730778e24b20b Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 16:31:09 +0100 Subject: [PATCH 45/57] Formatted gerty extension --- lnbits/core/views/api.py | 1 + lnbits/core/views/generic.py | 3 +- lnbits/core/views/public_api.py | 1 + lnbits/extensions/gerty/__init__.py | 2 + lnbits/extensions/gerty/crud.py | 4 +- lnbits/extensions/gerty/helpers.py | 134 +- lnbits/extensions/gerty/migrations.py | 2 +- lnbits/extensions/gerty/models.py | 13 +- lnbits/extensions/gerty/number_prefixer.py | 58 +- .../gerty/templates/gerty/_api_docs.html | 3 +- .../gerty/templates/gerty/gerty.html | 93 +- .../gerty/templates/gerty/index.html | 1243 +++++++++-------- lnbits/extensions/gerty/views.py | 12 +- 
lnbits/extensions/gerty/views_api.py | 236 ++-- tests/core/views/test_api.py | 5 +- tests/extensions/boltz/conftest.py | 4 +- tests/mocks.py | 1 + 17 files changed, 988 insertions(+), 827 deletions(-) diff --git a/lnbits/core/views/api.py b/lnbits/core/views/api.py index c33e874cb..b533a8e2c 100644 --- a/lnbits/core/views/api.py +++ b/lnbits/core/views/api.py @@ -34,6 +34,7 @@ from lnbits.utils.exchange_rates import ( fiat_amount_as_satoshis, satoshis_amount_as_fiat, ) + from .. import core_app, db from ..crud import ( create_payment, diff --git a/lnbits/core/views/generic.py b/lnbits/core/views/generic.py index fcc0365bf..31a7b0300 100644 --- a/lnbits/core/views/generic.py +++ b/lnbits/core/views/generic.py @@ -22,6 +22,8 @@ from lnbits.settings import ( LNBITS_SITE_TITLE, SERVICE_FEE, ) + +from ...helpers import get_valid_extensions from ..crud import ( create_account, create_wallet, @@ -32,7 +34,6 @@ from ..crud import ( update_user_extension, ) from ..services import pay_invoice, redeem_lnurl_withdraw -from ...helpers import get_valid_extensions core_html_routes: APIRouter = APIRouter(tags=["Core NON-API Website Routes"]) diff --git a/lnbits/core/views/public_api.py b/lnbits/core/views/public_api.py index 465693d9a..ef8dc056c 100644 --- a/lnbits/core/views/public_api.py +++ b/lnbits/core/views/public_api.py @@ -8,6 +8,7 @@ from loguru import logger from starlette.requests import Request from lnbits import bolt11 + from .. 
import core_app from ..crud import get_standalone_payment from ..tasks import api_invoice_listeners diff --git a/lnbits/extensions/gerty/__init__.py b/lnbits/extensions/gerty/__init__.py index c5f526b54..03fdef12b 100644 --- a/lnbits/extensions/gerty/__init__.py +++ b/lnbits/extensions/gerty/__init__.py @@ -11,8 +11,10 @@ db = Database("ext_gerty") gerty_ext: APIRouter = APIRouter(prefix="/gerty", tags=["Gerty"]) + def gerty_renderer(): return template_renderer(["lnbits/extensions/gerty/templates"]) + from .views import * # noqa from .views_api import * # noqa diff --git a/lnbits/extensions/gerty/crud.py b/lnbits/extensions/gerty/crud.py index a472ef37f..10b17df1e 100644 --- a/lnbits/extensions/gerty/crud.py +++ b/lnbits/extensions/gerty/crud.py @@ -28,7 +28,7 @@ async def create_gerty(wallet_id: str, data: Gerty) -> Gerty: data.lnbits_wallets, data.mempool_endpoint, data.exchange, - data.display_preferences + data.display_preferences, ), ) @@ -36,6 +36,7 @@ async def create_gerty(wallet_id: str, data: Gerty) -> Gerty: assert gerty, "Newly created gerty couldn't be retrieved" return gerty + async def update_gerty(gerty_id: str, **kwargs) -> Gerty: q = ", ".join([f"{field[0]} = ?" 
for field in kwargs.items()]) await db.execute( @@ -43,6 +44,7 @@ async def update_gerty(gerty_id: str, **kwargs) -> Gerty: ) return await get_gerty(gerty_id) + async def get_gerty(gerty_id: str) -> Optional[Gerty]: row = await db.fetchone("SELECT * FROM gerty.gertys WHERE id = ?", (gerty_id,)) return Gerty(**row) if row else None diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index 3f7f88e3e..b2c757a68 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -1,15 +1,18 @@ -from datetime import datetime, timedelta -import pytz -import httpx import textwrap +from datetime import datetime, timedelta + +import httpx +import pytz from loguru import logger from .number_prefixer import * + def get_percent_difference(current, previous, precision=4): difference = (current - previous) / current * 100 return "{0}{1}%".format("+" if difference > 0 else "", round(difference, precision)) + # A helper function get a nicely formated dict for the text def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int = None): # Get line size by font size @@ -30,26 +33,24 @@ def get_text_item_dict(text: str, font_size: int, x_pos: int = None, y_pos: int word_list = wrapper.wrap(text=text) # logger.debug("number of chars = {0}".format(len(text))) - multilineText = '\n'.join(word_list) + multilineText = "\n".join(word_list) # logger.debug("number of lines = {0}".format(len(word_list))) # logger.debug('multilineText') # logger.debug(multilineText) - text = { - "value": multilineText, - "size": font_size - } + text = {"value": multilineText, "size": font_size} if x_pos is None and y_pos is None: - text['position'] = 'center' + text["position"] = "center" else: - text['x'] = x_pos - text['y'] = y_pos + text["x"] = x_pos + text["y"] = y_pos return text + # format a number for nice display output def format_number(number, precision=None): - return ("{:,}".format(round(number, precision))) + return 
"{:,}".format(round(number, precision)) async def get_mempool_recommended_fees(gerty): @@ -58,6 +59,7 @@ async def get_mempool_recommended_fees(gerty): r = await client.get(gerty.mempool_endpoint + "/api/v1/fees/recommended") return r.json() + async def get_mining_dashboard(gerty): areas = [] if isinstance(gerty.mempool_endpoint, str): @@ -65,94 +67,141 @@ async def get_mining_dashboard(gerty): # current hashrate r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/1w") data = r.json() - hashrateNow = data['currentHashrate'] - hashrateOneWeekAgo = data['hashrates'][6]['avgHashrate'] + hashrateNow = data["currentHashrate"] + hashrateOneWeekAgo = data["hashrates"][6]["avgHashrate"] text = [] text.append(get_text_item_dict("Current mining hashrate", 12)) - text.append(get_text_item_dict("{0}hash".format(si_format(hashrateNow, 6, True, " ")), 20)) - text.append(get_text_item_dict("{0} vs 7 days ago".format(get_percent_difference(hashrateNow, hashrateOneWeekAgo, 3)), 12)) + text.append( + get_text_item_dict( + "{0}hash".format(si_format(hashrateNow, 6, True, " ")), 20 + ) + ) + text.append( + get_text_item_dict( + "{0} vs 7 days ago".format( + get_percent_difference(hashrateNow, hashrateOneWeekAgo, 3) + ), + 12, + ) + ) areas.append(text) - r = await client.get(gerty.mempool_endpoint + "/api/v1/difficulty-adjustment") + r = await client.get( + gerty.mempool_endpoint + "/api/v1/difficulty-adjustment" + ) # timeAvg text = [] - progress = "{0}%".format(round(r.json()['progressPercent'], 2)) + progress = "{0}%".format(round(r.json()["progressPercent"], 2)) text.append(get_text_item_dict("Progress through current epoch", 12)) text.append(get_text_item_dict(progress, 20)) areas.append(text) # difficulty adjustment text = [] - stat = r.json()['remainingTime'] + stat = r.json()["remainingTime"] text.append(get_text_item_dict("Time to next difficulty adjustment", 12)) text.append(get_text_item_dict(get_time_remaining(stat / 1000, 3), 20)) areas.append(text) # 
difficultyChange text = [] - difficultyChange = round(r.json()['difficultyChange'], 2) + difficultyChange = round(r.json()["difficultyChange"], 2) text.append(get_text_item_dict("Estimated difficulty change", 12)) - text.append(get_text_item_dict("{0}{1}%".format("+" if difficultyChange > 0 else "", round(difficultyChange, 2)), 20)) + text.append( + get_text_item_dict( + "{0}{1}%".format( + "+" if difficultyChange > 0 else "", round(difficultyChange, 2) + ), + 20, + ) + ) areas.append(text) r = await client.get(gerty.mempool_endpoint + "/api/v1/mining/hashrate/1m") data = r.json() stat = {} - stat['current'] = data['currentDifficulty'] - stat['previous'] = data['difficulty'][len(data['difficulty']) - 2]['difficulty'] + stat["current"] = data["currentDifficulty"] + stat["previous"] = data["difficulty"][len(data["difficulty"]) - 2][ + "difficulty" + ] return areas + async def api_get_lightning_stats(gerty): stat = {} if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: - r = await client.get(gerty.mempool_endpoint + "/api/v1/lightning/statistics/latest") + r = await client.get( + gerty.mempool_endpoint + "/api/v1/lightning/statistics/latest" + ) data = r.json() return data + async def get_lightning_stats(gerty): data = await api_get_lightning_stats(gerty) areas = [] text = [] text.append(get_text_item_dict("Channel Count", 12)) - text.append(get_text_item_dict(format_number(data['latest']['channel_count']), 20)) - difference = get_percent_difference(current=data['latest']['channel_count'], - previous=data['previous']['channel_count']) + text.append(get_text_item_dict(format_number(data["latest"]["channel_count"]), 20)) + difference = get_percent_difference( + current=data["latest"]["channel_count"], + previous=data["previous"]["channel_count"], + ) text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) areas.append(text) text = [] text.append(get_text_item_dict("Number of Nodes", 12)) - 
text.append(get_text_item_dict(format_number(data['latest']['node_count']), 20)) - difference = get_percent_difference(current=data['latest']['node_count'], previous=data['previous']['node_count']) + text.append(get_text_item_dict(format_number(data["latest"]["node_count"]), 20)) + difference = get_percent_difference( + current=data["latest"]["node_count"], previous=data["previous"]["node_count"] + ) text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) areas.append(text) text = [] text.append(get_text_item_dict("Total Capacity", 12)) - avg_capacity = float(data['latest']['total_capacity']) / float(100000000) - text.append(get_text_item_dict("{0} BTC".format(format_number(avg_capacity, 2)), 20)) - difference = get_percent_difference(current=data['latest']['total_capacity'], previous=data['previous']['total_capacity']) + avg_capacity = float(data["latest"]["total_capacity"]) / float(100000000) + text.append( + get_text_item_dict("{0} BTC".format(format_number(avg_capacity, 2)), 20) + ) + difference = get_percent_difference( + current=data["latest"]["total_capacity"], + previous=data["previous"]["total_capacity"], + ) text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) areas.append(text) text = [] text.append(get_text_item_dict("Average Channel Capacity", 12)) - text.append(get_text_item_dict("{0} sats".format(format_number(data['latest']['avg_capacity'])), 20)) - difference = get_percent_difference(current=data['latest']['avg_capacity'], previous=data['previous']['avg_capacity']) + text.append( + get_text_item_dict( + "{0} sats".format(format_number(data["latest"]["avg_capacity"])), 20 + ) + ) + difference = get_percent_difference( + current=data["latest"]["avg_capacity"], + previous=data["previous"]["avg_capacity"], + ) text.append(get_text_item_dict("{0} in last 7 days".format(difference), 12)) areas.append(text) return areas + def get_next_update_time(sleep_time_seconds: int = 0, timezone: str = "Europe/London"): 
utc_now = pytz.utc.localize(datetime.utcnow()) next_refresh_time = utc_now + timedelta(0, sleep_time_seconds) local_refresh_time = next_refresh_time.astimezone(pytz.timezone(timezone)) - return "{0} {1}".format("I'll wake up at" if gerty_should_sleep() else "Next update at",local_refresh_time.strftime("%H:%M on %e %b %Y")) + return "{0} {1}".format( + "I'll wake up at" if gerty_should_sleep() else "Next update at", + local_refresh_time.strftime("%H:%M on %e %b %Y"), + ) + def gerty_should_sleep(timezone: str = "Europe/London"): utc_now = pytz.utc.localize(datetime.utcnow()) @@ -161,13 +210,12 @@ def gerty_should_sleep(timezone: str = "Europe/London"): hours = int(hours) logger.debug("HOURS") logger.debug(hours) - if(hours >= 22 and hours <= 23): + if hours >= 22 and hours <= 23: return True else: return False - def get_date_suffix(dayNumber): if 4 <= dayNumber <= 20 or 24 <= dayNumber <= 30: return "th" @@ -178,10 +226,10 @@ def get_date_suffix(dayNumber): def get_time_remaining(seconds, granularity=2): intervals = ( # ('weeks', 604800), # 60 * 60 * 24 * 7 - ('days', 86400), # 60 * 60 * 24 - ('hours', 3600), # 60 * 60 - ('minutes', 60), - ('seconds', 1), + ("days", 86400), # 60 * 60 * 24 + ("hours", 3600), # 60 * 60 + ("minutes", 60), + ("seconds", 1), ) result = [] @@ -191,6 +239,6 @@ def get_time_remaining(seconds, granularity=2): if value: seconds -= value * count if value == 1: - name = name.rstrip('s') + name = name.rstrip("s") result.append("{} {}".format(round(value), name)) - return ', '.join(result[:granularity]) + return ", ".join(result[:granularity]) diff --git a/lnbits/extensions/gerty/migrations.py b/lnbits/extensions/gerty/migrations.py index 459fc8807..0e15b68e2 100644 --- a/lnbits/extensions/gerty/migrations.py +++ b/lnbits/extensions/gerty/migrations.py @@ -15,4 +15,4 @@ async def m001_initial(db): display_preferences TEXT ); """ - ) \ No newline at end of file + ) diff --git a/lnbits/extensions/gerty/models.py b/lnbits/extensions/gerty/models.py 
index fc7a33774..89707a86a 100644 --- a/lnbits/extensions/gerty/models.py +++ b/lnbits/extensions/gerty/models.py @@ -4,16 +4,21 @@ from typing import Optional from fastapi import Query from pydantic import BaseModel + class Gerty(BaseModel): id: str = Query(None) name: str wallet: str refresh_time: int = Query(None) - lnbits_wallets: str = Query(None) # Wallets to keep an eye on, {"wallet-id": "wallet-read-key, etc"} - mempool_endpoint: str = Query(None) # Mempool endpoint to use - exchange: str = Query(None) # BTC <-> Fiat exchange rate to pull ie "USD", in 0.0001 and sats + lnbits_wallets: str = Query( + None + ) # Wallets to keep an eye on, {"wallet-id": "wallet-read-key, etc"} + mempool_endpoint: str = Query(None) # Mempool endpoint to use + exchange: str = Query( + None + ) # BTC <-> Fiat exchange rate to pull ie "USD", in 0.0001 and sats display_preferences: str = Query(None) @classmethod def from_row(cls, row: Row) -> "Gerty": - return cls(**dict(row)) \ No newline at end of file + return cls(**dict(row)) diff --git a/lnbits/extensions/gerty/number_prefixer.py b/lnbits/extensions/gerty/number_prefixer.py index 1ba8c024b..eab684e7a 100644 --- a/lnbits/extensions/gerty/number_prefixer.py +++ b/lnbits/extensions/gerty/number_prefixer.py @@ -1,48 +1,51 @@ import math + def si_classifier(val): suffixes = { - 24:{'long_suffix':'yotta', 'short_suffix':'Y', 'scalar':10**24}, - 21:{'long_suffix':'zetta', 'short_suffix':'Z', 'scalar':10**21}, - 18:{'long_suffix':'exa', 'short_suffix':'E', 'scalar':10**18}, - 15:{'long_suffix':'peta', 'short_suffix':'P', 'scalar':10**15}, - 12:{'long_suffix':'tera', 'short_suffix':'T', 'scalar':10**12}, - 9:{'long_suffix':'giga', 'short_suffix':'G', 'scalar':10**9}, - 6:{'long_suffix':'mega', 'short_suffix':'M', 'scalar':10**6}, - 3:{'long_suffix':'kilo', 'short_suffix':'k', 'scalar':10**3}, - 0:{'long_suffix':'', 'short_suffix':'', 'scalar':10**0}, - -3:{'long_suffix':'milli', 'short_suffix':'m', 'scalar':10**-3}, - 
-6:{'long_suffix':'micro', 'short_suffix':'µ', 'scalar':10**-6}, - -9:{'long_suffix':'nano', 'short_suffix':'n', 'scalar':10**-9}, - -12:{'long_suffix':'pico', 'short_suffix':'p', 'scalar':10**-12}, - -15:{'long_suffix':'femto', 'short_suffix':'f', 'scalar':10**-15}, - -18:{'long_suffix':'atto', 'short_suffix':'a', 'scalar':10**-18}, - -21:{'long_suffix':'zepto', 'short_suffix':'z', 'scalar':10**-21}, - -24:{'long_suffix':'yocto', 'short_suffix':'y', 'scalar':10**-24} + 24: {"long_suffix": "yotta", "short_suffix": "Y", "scalar": 10**24}, + 21: {"long_suffix": "zetta", "short_suffix": "Z", "scalar": 10**21}, + 18: {"long_suffix": "exa", "short_suffix": "E", "scalar": 10**18}, + 15: {"long_suffix": "peta", "short_suffix": "P", "scalar": 10**15}, + 12: {"long_suffix": "tera", "short_suffix": "T", "scalar": 10**12}, + 9: {"long_suffix": "giga", "short_suffix": "G", "scalar": 10**9}, + 6: {"long_suffix": "mega", "short_suffix": "M", "scalar": 10**6}, + 3: {"long_suffix": "kilo", "short_suffix": "k", "scalar": 10**3}, + 0: {"long_suffix": "", "short_suffix": "", "scalar": 10**0}, + -3: {"long_suffix": "milli", "short_suffix": "m", "scalar": 10**-3}, + -6: {"long_suffix": "micro", "short_suffix": "µ", "scalar": 10**-6}, + -9: {"long_suffix": "nano", "short_suffix": "n", "scalar": 10**-9}, + -12: {"long_suffix": "pico", "short_suffix": "p", "scalar": 10**-12}, + -15: {"long_suffix": "femto", "short_suffix": "f", "scalar": 10**-15}, + -18: {"long_suffix": "atto", "short_suffix": "a", "scalar": 10**-18}, + -21: {"long_suffix": "zepto", "short_suffix": "z", "scalar": 10**-21}, + -24: {"long_suffix": "yocto", "short_suffix": "y", "scalar": 10**-24}, } - exponent = int(math.floor(math.log10(abs(val))/3.0)*3) + exponent = int(math.floor(math.log10(abs(val)) / 3.0) * 3) return suffixes.get(exponent, None) + def si_formatter(value): - ''' + """ Return a triple of scaled value, short suffix, long suffix, or None if the value cannot be classified. 
- ''' + """ classifier = si_classifier(value) if classifier == None: # Don't know how to classify this value return None - scaled = value / classifier['scalar'] - return (scaled, classifier['short_suffix'], classifier['long_suffix']) + scaled = value / classifier["scalar"] + return (scaled, classifier["short_suffix"], classifier["long_suffix"]) -def si_format(value, precision=4, long_form=False, separator=''): - ''' + +def si_format(value, precision=4, long_form=False, separator=""): + """ "SI prefix" formatted string: return a string with the given precision and an appropriate order-of-3-magnitudes suffix, e.g.: si_format(1001.0) => '1.00K' si_format(0.00000000123, long_form=True, separator=' ') => '1.230 nano' - ''' + """ scaled, short_suffix, long_suffix = si_formatter(value) if scaled == None: @@ -58,5 +61,6 @@ def si_format(value, precision=4, long_form=False, separator=''): else: precision = precision - 3 - return '{scaled:.{precision}f}{separator}{suffix}'.format( - scaled=scaled, precision=precision, separator=separator, suffix=suffix) \ No newline at end of file + return "{scaled:.{precision}f}{separator}{suffix}".format( + scaled=scaled, precision=precision, separator=separator, suffix=suffix + ) diff --git a/lnbits/extensions/gerty/templates/gerty/_api_docs.html b/lnbits/extensions/gerty/templates/gerty/_api_docs.html index 889760e19..db1412799 100644 --- a/lnbits/extensions/gerty/templates/gerty/_api_docs.html +++ b/lnbits/extensions/gerty/templates/gerty/_api_docs.html @@ -71,7 +71,8 @@
Curl example
curl -X DELETE {{ request.base_url - }}gerty/api/v1/gertys/<gerty_id> -H "X-Api-Key: <admin_key>" + }}gerty/api/v1/gertys/<gerty_id> -H "X-Api-Key: + <admin_key>" diff --git a/lnbits/extensions/gerty/templates/gerty/gerty.html b/lnbits/extensions/gerty/templates/gerty/gerty.html index e4401fe14..216e57213 100644 --- a/lnbits/extensions/gerty/templates/gerty/gerty.html +++ b/lnbits/extensions/gerty/templates/gerty/gerty.html @@ -1,40 +1,74 @@ -{% extends "public.html" %} {% block toolbar_title %} {{ gerty.name }}{% endblock %}{% block page %} -{% raw %} +{% extends "public.html" %} {% block toolbar_title %} {{ gerty.name }}{% +endblock %}{% block page %} {% raw %}
- - "{{gerty.sats_quote[0].text}}"
~ Satoshi {{gerty.sats_quote[0].date}} -
+ + "{{gerty.sats_quote[0].text}}"
~ Satoshi {{gerty.sats_quote[0].date}} +
- + {{gerty.exchange[0].amount.toFixed(2)}} {{gerty.exchange[0].fiat}} - - - -
-
-
+ + + +
+ +
+
-
-
{{gertywallet.amount}}
{{gertywallet.name}}
-
-
-
+
+
+ {{gertywallet.amount}} +
+
{{gertywallet.name}}
+
+
+
-
+
-

Onchain Stats

- Difficulty Progress Percent - -
- -
-
+

Onchain Stats

+ Difficulty Progress Percent + +
+ +
+
@@ -46,17 +80,14 @@
-

LN Stats

+

LN Stats

-
- {{gerty.ln}} -
+
{{gerty.ln}}
-{% endraw %} -{% endblock %} {% block scripts %} +{% endraw %} {% endblock %} {% block scripts %} + LNbits.utils + .confirmDialog('Are you sure you want to delete this Gerty?') + .onOk(function () { + LNbits.api + .request( + 'DELETE', + '/gerty/api/v1/gerty/' + gertyId, + _.findWhere(self.g.user.wallets, {id: gerty.wallet}).adminkey + ) + .then(function (response) { + self.gertys = _.reject(self.gertys, function (obj) { + return obj.id == gertyId + }) + }) + .catch(function (error) { + LNbits.utils.notifyApiError(error) + }) + }) + }, + exportCSV: function () { + LNbits.utils.exportCSV(this.gertysTable.columns, this.gertys) + } + }, + created: function () { + if (this.g.user.wallets.length) { + this.getGertys() + } + } + }) + +{% endblock %} {% block styles %} + {% endblock %} - -{% block styles %} - -{% endblock %} \ No newline at end of file diff --git a/lnbits/extensions/gerty/views.py b/lnbits/extensions/gerty/views.py index 630cb48bf..e05861691 100644 --- a/lnbits/extensions/gerty/views.py +++ b/lnbits/extensions/gerty/views.py @@ -1,8 +1,10 @@ +import json from http import HTTPStatus from fastapi import Request from fastapi.params import Depends from fastapi.templating import Jinja2Templates +from loguru import logger from starlette.exceptions import HTTPException from starlette.responses import HTMLResponse @@ -14,18 +16,16 @@ from . 
import gerty_ext, gerty_renderer from .crud import get_gerty from .views_api import api_gerty_json -import json - -from loguru import logger - templates = Jinja2Templates(directory="templates") + @gerty_ext.get("/", response_class=HTMLResponse) async def index(request: Request, user: User = Depends(check_user_exists)): return gerty_renderer().TemplateResponse( "gerty/index.html", {"request": request, "user": user.dict()} ) + @gerty_ext.get("/{gerty_id}", response_class=HTMLResponse) async def display(request: Request, gerty_id): gerty = await get_gerty(gerty_id) @@ -34,4 +34,6 @@ async def display(request: Request, gerty_id): status_code=HTTPStatus.NOT_FOUND, detail="Gerty does not exist." ) gertyData = await api_gerty_json(gerty_id) - return gerty_renderer().TemplateResponse("gerty/gerty.html", {"request": request, "gerty": gertyData}) \ No newline at end of file + return gerty_renderer().TemplateResponse( + "gerty/gerty.html", {"request": request, "gerty": gertyData} + ) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 4151e7a00..d89a557c7 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -1,37 +1,35 @@ -import math -from http import HTTPStatus import json -import httpx -import random +import math import os +import random import time from datetime import datetime +from http import HTTPStatus + +import httpx from fastapi import Query from fastapi.params import Depends +from fastapi.templating import Jinja2Templates from lnurl import decode as decode_lnurl from loguru import logger from starlette.exceptions import HTTPException -from lnbits.core.crud import get_wallet_for_key -from lnbits.core.crud import get_user +from lnbits.core.crud import get_user, get_wallet_for_key from lnbits.core.services import create_invoice from lnbits.core.views.api import api_payment, api_wallet from lnbits.decorators import WalletTypeInfo, get_key_type, require_admin_key -from 
fastapi.templating import Jinja2Templates - -from .helpers import * - -from . import gerty_ext -from .crud import create_gerty, update_gerty, delete_gerty, get_gerty, get_gertys -from .models import Gerty - from lnbits.utils.exchange_rates import satoshis_amount_as_fiat + from ...settings import LNBITS_PATH +from . import gerty_ext +from .crud import create_gerty, delete_gerty, get_gerty, get_gertys, update_gerty +from .helpers import * +from .models import Gerty @gerty_ext.get("/api/v1/gerty", status_code=HTTPStatus.OK) async def api_gertys( - all_wallets: bool = Query(False), wallet: WalletTypeInfo = Depends(get_key_type) + all_wallets: bool = Query(False), wallet: WalletTypeInfo = Depends(get_key_type) ): wallet_ids = [wallet.wallet.id] if all_wallets: @@ -43,9 +41,9 @@ async def api_gertys( @gerty_ext.post("/api/v1/gerty", status_code=HTTPStatus.CREATED) @gerty_ext.put("/api/v1/gerty/{gerty_id}", status_code=HTTPStatus.OK) async def api_link_create_or_update( - data: Gerty, - wallet: WalletTypeInfo = Depends(get_key_type), - gerty_id: str = Query(None), + data: Gerty, + wallet: WalletTypeInfo = Depends(get_key_type), + gerty_id: str = Query(None), ): if gerty_id: gerty = await get_gerty(gerty_id) @@ -70,7 +68,7 @@ async def api_link_create_or_update( @gerty_ext.delete("/api/v1/gerty/{gerty_id}") async def api_gerty_delete( - gerty_id: str, wallet: WalletTypeInfo = Depends(require_admin_key) + gerty_id: str, wallet: WalletTypeInfo = Depends(require_admin_key) ): gerty = await get_gerty(gerty_id) @@ -88,10 +86,11 @@ async def api_gerty_delete( ####################### + @gerty_ext.get("/api/v1/gerty/satoshiquote", status_code=HTTPStatus.OK) async def api_gerty_satoshi(): - maxQuoteLength = 186; - with open(os.path.join(LNBITS_PATH, 'extensions/gerty/static/satoshi.json')) as fd: + maxQuoteLength = 186 + with open(os.path.join(LNBITS_PATH, "extensions/gerty/static/satoshi.json")) as fd: satoshiQuotes = json.load(fd) quote = satoshiQuotes[random.randint(0, 
len(satoshiQuotes) - 1)] # logger.debug(quote.text) @@ -103,10 +102,7 @@ async def api_gerty_satoshi(): @gerty_ext.get("/api/v1/gerty/{gerty_id}/{p}") -async def api_gerty_json( - gerty_id: str, - p: int = None # page number -): +async def api_gerty_json(gerty_id: str, p: int = None): # page number gerty = await get_gerty(gerty_id) if not gerty: @@ -129,7 +125,7 @@ async def api_gerty_json( logger.debug("Screeens " + str(enabled_screens)) data = await get_screen_data(p, enabled_screens, gerty) - next_screen_number = 0 if ((p + 1) >= enabled_screen_count) else p + 1; + next_screen_number = 0 if ((p + 1) >= enabled_screen_count) else p + 1 # get the sleep time sleep_time = gerty.refresh_time if gerty.refresh_time else 300 @@ -143,14 +139,14 @@ async def api_gerty_json( "requestTimestamp": get_next_update_time(sleep_time), "nextScreenNumber": next_screen_number, "showTextBoundRect": False, - "name": gerty.name + "name": gerty.name, }, "screen": { "slug": get_screen_slug_by_index(p, enabled_screens), "group": get_screen_slug_by_index(p, enabled_screens), - "title": data['title'], - "areas": data['areas'] - } + "title": data["title"], + "areas": data["areas"], + }, } @@ -163,7 +159,7 @@ def get_screen_slug_by_index(index: int, screens_list): async def get_screen_data(screen_num: int, screens_list: dict, gerty): screen_slug = get_screen_slug_by_index(screen_num, screens_list) # first get the relevant slug from the display_preferences - logger.debug('screen_slug') + logger.debug("screen_slug") logger.debug(screen_slug) areas = [] title = "" @@ -188,8 +184,8 @@ async def get_screen_data(screen_num: int, screens_list: dict, gerty): areas = await get_lightning_stats(gerty) data = {} - data['title'] = title - data['areas'] = areas + data["title"] = title + data["areas"] = areas return data @@ -208,8 +204,10 @@ async def get_dashboard(gerty): wallets = await get_lnbits_wallet_balances(gerty) text = [] for wallet in wallets: - 
text.append(get_text_item_dict("{0}".format(wallet['name']), 15)) - text.append(get_text_item_dict("{0} sats".format(format_number(wallet['balance'])), 20)) + text.append(get_text_item_dict("{0}".format(wallet["name"]), 15)) + text.append( + get_text_item_dict("{0} sats".format(format_number(wallet["balance"])), 20) + ) areas.append(text) # Mempool fees @@ -220,7 +218,11 @@ async def get_dashboard(gerty): # difficulty adjustment time text = [] - text.append(get_text_item_dict(await get_time_remaining_next_difficulty_adjustment(gerty), 15)) + text.append( + get_text_item_dict( + await get_time_remaining_next_difficulty_adjustment(gerty), 15 + ) + ) text.append(get_text_item_dict("until next difficulty adjustment", 12)) areas.append(text) @@ -235,18 +237,20 @@ async def get_lnbits_wallet_balances(gerty): wallet = await get_wallet_for_key(key=lnbits_wallet) logger.debug(wallet.name) if wallet: - wallets.append({ - "name": wallet.name, - "balance": wallet.balance_msat / 1000, - "inkey": wallet.inkey, - }) + wallets.append( + { + "name": wallet.name, + "balance": wallet.balance_msat / 1000, + "inkey": wallet.inkey, + } + ) return wallets async def get_placeholder_text(): return [ get_text_item_dict("Some placeholder text", 15, 10, 50), - get_text_item_dict("Some placeholder text", 15, 10, 50) + get_text_item_dict("Some placeholder text", 15, 10, 50), ] @@ -255,10 +259,12 @@ async def get_satoshi_quotes(): text = [] quote = await api_gerty_satoshi() if quote: - if quote['text']: - text.append(get_text_item_dict(quote['text'], 15)) - if quote['date']: - text.append(get_text_item_dict("Satoshi Nakamoto - {0}".format(quote['date']), 15)) + if quote["text"]: + text.append(get_text_item_dict(quote["text"], 15)) + if quote["date"]: + text.append( + get_text_item_dict("Satoshi Nakamoto - {0}".format(quote["date"]), 15) + ) return text @@ -270,7 +276,11 @@ async def get_exchange_rate(gerty): amount = await satoshis_amount_as_fiat(100000000, gerty.exchange) if amount: price = 
format_number(amount) - text.append(get_text_item_dict("Current {0}/BTC price".format(gerty.exchange), 15)) + text.append( + get_text_item_dict( + "Current {0}/BTC price".format(gerty.exchange), 15 + ) + ) text.append(get_text_item_dict(price, 80)) except: pass @@ -281,29 +291,43 @@ async def get_onchain_dashboard(gerty): areas = [] if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: - r = await client.get(gerty.mempool_endpoint + "/api/v1/difficulty-adjustment") + r = await client.get( + gerty.mempool_endpoint + "/api/v1/difficulty-adjustment" + ) text = [] - stat = round(r.json()['progressPercent']) - text.append(get_text_item_dict("Progress through current difficulty epoch", 12)) + stat = round(r.json()["progressPercent"]) + text.append( + get_text_item_dict("Progress through current difficulty epoch", 12) + ) text.append(get_text_item_dict("{0}%".format(stat), 20)) areas.append(text) text = [] - stat = r.json()['estimatedRetargetDate'] + stat = r.json()["estimatedRetargetDate"] dt = datetime.fromtimestamp(stat / 1000).strftime("%e %b %Y at %H:%M") - text.append(get_text_item_dict("Estimated date of next difficulty adjustment", 12)) + text.append( + get_text_item_dict("Estimated date of next difficulty adjustment", 12) + ) text.append(get_text_item_dict(dt, 20)) areas.append(text) text = [] - stat = r.json()['remainingBlocks'] - text.append(get_text_item_dict("Blocks remaining until next difficulty adjustment", 12)) + stat = r.json()["remainingBlocks"] + text.append( + get_text_item_dict( + "Blocks remaining until next difficulty adjustment", 12 + ) + ) text.append(get_text_item_dict("{0}".format(format_number(stat)), 20)) areas.append(text) text = [] - stat = r.json()['remainingTime'] - text.append(get_text_item_dict("Blocks remaining until next difficulty adjustment", 12)) + stat = r.json()["remainingTime"] + text.append( + get_text_item_dict( + "Blocks remaining until next difficulty adjustment", 12 + ) + ) 
text.append(get_text_item_dict(get_time_remaining(stat / 1000, 4), 20)) areas.append(text) @@ -313,8 +337,10 @@ async def get_onchain_dashboard(gerty): async def get_time_remaining_next_difficulty_adjustment(gerty): if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: - r = await client.get(gerty.mempool_endpoint + "/api/v1/difficulty-adjustment") - stat = r.json()['remainingTime'] + r = await client.get( + gerty.mempool_endpoint + "/api/v1/difficulty-adjustment" + ) + stat = r.json()["remainingTime"] time = get_time_remaining(stat / 1000, 3) return time @@ -331,17 +357,15 @@ async def get_mempool_stat(stat_slug: str, gerty): text = [] if isinstance(gerty.mempool_endpoint, str): async with httpx.AsyncClient() as client: - if ( - stat_slug == "mempool_tx_count" - ): + if stat_slug == "mempool_tx_count": r = await client.get(gerty.mempool_endpoint + "/api/mempool") if stat_slug == "mempool_tx_count": - stat = round(r.json()['count']) + stat = round(r.json()["count"]) text.append(get_text_item_dict("Transactions in the mempool", 15)) - text.append(get_text_item_dict("{0}".format(format_number(stat)), 80)) - elif ( - stat_slug == "mempool_recommended_fees" - ): + text.append( + get_text_item_dict("{0}".format(format_number(stat)), 80) + ) + elif stat_slug == "mempool_recommended_fees": y_offset = 60 fees = await get_mempool_recommended_fees(gerty) pos_y = 80 + y_offset @@ -350,35 +374,75 @@ async def get_mempool_stat(stat_slug: str, gerty): text.append(get_text_item_dict("Recommended Tx Fees", 20, 240, pos_y)) pos_y = 280 + y_offset - text.append(get_text_item_dict("{0}".format("No Priority"), 15, 30, pos_y)) - text.append(get_text_item_dict("{0}".format("Low Priority"), 15, 235, pos_y)) - text.append(get_text_item_dict("{0}".format("Medium Priority"), 15, 460, pos_y)) - text.append(get_text_item_dict("{0}".format("High Priority"), 15, 750, pos_y)) + text.append( + get_text_item_dict("{0}".format("No Priority"), 15, 30, pos_y) + ) + 
text.append( + get_text_item_dict("{0}".format("Low Priority"), 15, 235, pos_y) + ) + text.append( + get_text_item_dict("{0}".format("Medium Priority"), 15, 460, pos_y) + ) + text.append( + get_text_item_dict("{0}".format("High Priority"), 15, 750, pos_y) + ) pos_y = 340 + y_offset font_size = 15 fee_append = "/vB" fee_rate = fees["economyFee"] - text.append(get_text_item_dict( - "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), - font_size, - 30, pos_y)) + text.append( + get_text_item_dict( + "{0} {1}{2}".format( + format_number(fee_rate), + ("sat" if fee_rate == 1 else "sats"), + fee_append, + ), + font_size, + 30, + pos_y, + ) + ) fee_rate = fees["hourFee"] - text.append(get_text_item_dict( - "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), - font_size, - 235, pos_y)) + text.append( + get_text_item_dict( + "{0} {1}{2}".format( + format_number(fee_rate), + ("sat" if fee_rate == 1 else "sats"), + fee_append, + ), + font_size, + 235, + pos_y, + ) + ) fee_rate = fees["halfHourFee"] - text.append(get_text_item_dict( - "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), - font_size, - 460, pos_y)) + text.append( + get_text_item_dict( + "{0} {1}{2}".format( + format_number(fee_rate), + ("sat" if fee_rate == 1 else "sats"), + fee_append, + ), + font_size, + 460, + pos_y, + ) + ) fee_rate = fees["fastestFee"] - text.append(get_text_item_dict( - "{0} {1}{2}".format(format_number(fee_rate), ("sat" if fee_rate == 1 else "sats"), fee_append), - font_size, - 750, pos_y)) + text.append( + get_text_item_dict( + "{0} {1}{2}".format( + format_number(fee_rate), + ("sat" if fee_rate == 1 else "sats"), + fee_append, + ), + font_size, + 750, + pos_y, + ) + ) return text diff --git a/tests/core/views/test_api.py b/tests/core/views/test_api.py index 81468fd1f..c62801535 100644 --- a/tests/core/views/test_api.py +++ b/tests/core/views/test_api.py @@ 
-3,10 +3,9 @@ import hashlib import pytest from lnbits import bolt11 -from lnbits.core.views.api import ( - api_payment, -) +from lnbits.core.views.api import api_payment from lnbits.settings import wallet_class + from ...helpers import get_random_invoice_data, is_regtest diff --git a/tests/extensions/boltz/conftest.py b/tests/extensions/boltz/conftest.py index 1bd1c638d..930a1bfb1 100644 --- a/tests/extensions/boltz/conftest.py +++ b/tests/extensions/boltz/conftest.py @@ -1,9 +1,7 @@ import pytest_asyncio from lnbits.extensions.boltz.boltz import create_reverse_swap -from lnbits.extensions.boltz.models import ( - CreateReverseSubmarineSwap, -) +from lnbits.extensions.boltz.models import CreateReverseSubmarineSwap @pytest_asyncio.fixture(scope="session") diff --git a/tests/mocks.py b/tests/mocks.py index 7e2df4f70..3fc0efae2 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -4,6 +4,7 @@ from lnbits import bolt11 from lnbits.settings import WALLET from lnbits.wallets.base import PaymentResponse, PaymentStatus, StatusResponse from lnbits.wallets.fake import FakeWallet + from .helpers import get_random_string, is_fake From f636951d3ce056c1775d22d4102677e325eb6ef4 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 16:40:33 +0100 Subject: [PATCH 46/57] Added gerty migration for UTC offset col --- lnbits/extensions/gerty/migrations.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lnbits/extensions/gerty/migrations.py b/lnbits/extensions/gerty/migrations.py index 0e15b68e2..61722835f 100644 --- a/lnbits/extensions/gerty/migrations.py +++ b/lnbits/extensions/gerty/migrations.py @@ -16,3 +16,11 @@ async def m001_initial(db): ); """ ) + +async def m002_add_utc_offset_col(db): + """ + support for UTC offset + """ + await db.execute( + "ALTER TABLE gerty.gertys ADD COLUMN utc_offset INT;" + ) From ecfd4b3d66927c87489866a906bb448ed53828f3 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 16:58:14 +0100 Subject: [PATCH 47/57] Added 
UTC field to gerty modal --- lnbits/extensions/gerty/crud.py | 8 ++++-- lnbits/extensions/gerty/models.py | 1 + .../gerty/templates/gerty/index.html | 28 +++++++++++++------ 3 files changed, 26 insertions(+), 11 deletions(-) diff --git a/lnbits/extensions/gerty/crud.py b/lnbits/extensions/gerty/crud.py index 10b17df1e..3850737f0 100644 --- a/lnbits/extensions/gerty/crud.py +++ b/lnbits/extensions/gerty/crud.py @@ -14,21 +14,25 @@ async def create_gerty(wallet_id: str, data: Gerty) -> Gerty: id, name, wallet, + utc_offset, lnbits_wallets, mempool_endpoint, exchange, - display_preferences + display_preferences, + refresh_time ) - VALUES (?, ?, ?, ?, ?, ?, ?) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) """, ( gerty_id, data.name, data.wallet, + data.utc_offset, data.lnbits_wallets, data.mempool_endpoint, data.exchange, data.display_preferences, + data.refresh_time, ), ) diff --git a/lnbits/extensions/gerty/models.py b/lnbits/extensions/gerty/models.py index 89707a86a..855a30c9c 100644 --- a/lnbits/extensions/gerty/models.py +++ b/lnbits/extensions/gerty/models.py @@ -10,6 +10,7 @@ class Gerty(BaseModel): name: str wallet: str refresh_time: int = Query(None) + utc_offset: int = Query(None) lnbits_wallets: str = Query( None ) # Wallets to keep an eye on, {"wallet-id": "wallet-read-key, etc"} diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index 64afc6832..dc6cea2aa 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -179,6 +179,17 @@ > + + Enter a UTC time offset value (e.g. -1) + +

Use the toggles below to control what your Gerty will display

Date: Thu, 20 Oct 2022 17:01:03 +0100 Subject: [PATCH 48/57] Fix link to API --- lnbits/extensions/gerty/helpers.py | 2 +- lnbits/extensions/gerty/templates/gerty/index.html | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index b2c757a68..c30553570 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -194,7 +194,7 @@ async def get_lightning_stats(gerty): def get_next_update_time(sleep_time_seconds: int = 0, timezone: str = "Europe/London"): - utc_now = pytz.utc.localize(datetime.utcnow()) + utc_now = datetime.utcnow() next_refresh_time = utc_now + timedelta(0, sleep_time_seconds) local_refresh_time = next_refresh_time.astimezone(pytz.timezone(timezone)) return "{0} {1}".format( diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index dc6cea2aa..10967fe5b 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -263,7 +263,7 @@ ) obj.fsat = new Intl.NumberFormat(LOCALE).format(obj.amount) obj.gerty = ['/gerty/', obj.id].join('') - obj.gertyJson = ['/gerty/api/v1/gerty/', obj.id].join('') + obj.gertyJson = ['/gerty/api/v1/gerty/', obj.id, '/0'].join('') return obj } From 82d4933d74b71b8461f0a14aaa915d05cee9d627 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 17:01:21 +0100 Subject: [PATCH 49/57] Fix link to API --- lnbits/extensions/gerty/templates/gerty/index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/templates/gerty/index.html b/lnbits/extensions/gerty/templates/gerty/index.html index 10967fe5b..3c258c1c8 100644 --- a/lnbits/extensions/gerty/templates/gerty/index.html +++ b/lnbits/extensions/gerty/templates/gerty/index.html @@ -69,7 +69,7 @@ :href="props.row.gertyJson" target="_blank" > - Launch software Gerty + View Gerty API From 
65a3e4feb29a5ea0a28213efbaa12ac3d0512b39 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 17:04:34 +0100 Subject: [PATCH 50/57] Font size changes on onchain data dashboard --- lnbits/extensions/gerty/helpers.py | 2 +- lnbits/extensions/gerty/views_api.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index c30553570..336094b39 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -102,7 +102,7 @@ async def get_mining_dashboard(gerty): text = [] stat = r.json()["remainingTime"] text.append(get_text_item_dict("Time to next difficulty adjustment", 12)) - text.append(get_text_item_dict(get_time_remaining(stat / 1000, 3), 20)) + text.append(get_text_item_dict(get_time_remaining(stat / 1000, 3), 12)) areas.append(text) # difficultyChange diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index d89a557c7..9962a99d3 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -297,16 +297,16 @@ async def get_onchain_dashboard(gerty): text = [] stat = round(r.json()["progressPercent"]) text.append( - get_text_item_dict("Progress through current difficulty epoch", 12) + get_text_item_dict("Progress through current epoch", 12) ) - text.append(get_text_item_dict("{0}%".format(stat), 20)) + text.append(get_text_item_dict("{0}%".format(stat), 60)) areas.append(text) text = [] stat = r.json()["estimatedRetargetDate"] dt = datetime.fromtimestamp(stat / 1000).strftime("%e %b %Y at %H:%M") text.append( - get_text_item_dict("Estimated date of next difficulty adjustment", 12) + get_text_item_dict("Date of next difficulty adjustment", 12) ) text.append(get_text_item_dict(dt, 20)) areas.append(text) @@ -315,20 +315,20 @@ async def get_onchain_dashboard(gerty): stat = r.json()["remainingBlocks"] text.append( get_text_item_dict( - "Blocks remaining until 
next difficulty adjustment", 12 + "Blocks until next adjustment", 12 ) ) - text.append(get_text_item_dict("{0}".format(format_number(stat)), 20)) + text.append(get_text_item_dict("{0}".format(format_number(stat)), 60)) areas.append(text) text = [] stat = r.json()["remainingTime"] text.append( get_text_item_dict( - "Blocks remaining until next difficulty adjustment", 12 + "Blocks until next adjustment", 12 ) ) - text.append(get_text_item_dict(get_time_remaining(stat / 1000, 4), 20)) + text.append(get_text_item_dict(get_time_remaining(stat / 1000, 4), 60)) areas.append(text) return areas From e193c4ef4736be2637f08c498429ce51bc5c9851 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 17:15:12 +0100 Subject: [PATCH 51/57] Replaced pytz with utc_offset --- lnbits/extensions/gerty/helpers.py | 17 ++++++++--------- lnbits/extensions/gerty/views_api.py | 5 +++-- poetry.lock | 14 +------------- pyproject.toml | 1 - requirements.txt | 1 - 5 files changed, 12 insertions(+), 26 deletions(-) diff --git a/lnbits/extensions/gerty/helpers.py b/lnbits/extensions/gerty/helpers.py index 336094b39..d7e0e9511 100644 --- a/lnbits/extensions/gerty/helpers.py +++ b/lnbits/extensions/gerty/helpers.py @@ -2,7 +2,6 @@ import textwrap from datetime import datetime, timedelta import httpx -import pytz from loguru import logger from .number_prefixer import * @@ -95,7 +94,7 @@ async def get_mining_dashboard(gerty): text = [] progress = "{0}%".format(round(r.json()["progressPercent"], 2)) text.append(get_text_item_dict("Progress through current epoch", 12)) - text.append(get_text_item_dict(progress, 20)) + text.append(get_text_item_dict(progress, 60)) areas.append(text) # difficulty adjustment @@ -114,7 +113,7 @@ async def get_mining_dashboard(gerty): "{0}{1}%".format( "+" if difficultyChange > 0 else "", round(difficultyChange, 2) ), - 20, + 60, ) ) areas.append(text) @@ -193,19 +192,19 @@ async def get_lightning_stats(gerty): return areas -def 
get_next_update_time(sleep_time_seconds: int = 0, timezone: str = "Europe/London"): +def get_next_update_time(sleep_time_seconds: int = 0, utc_offset: int = 0): utc_now = datetime.utcnow() next_refresh_time = utc_now + timedelta(0, sleep_time_seconds) - local_refresh_time = next_refresh_time.astimezone(pytz.timezone(timezone)) + local_refresh_time = next_refresh_time + timedelta(hours=utc_offset) return "{0} {1}".format( - "I'll wake up at" if gerty_should_sleep() else "Next update at", + "I'll wake up at" if gerty_should_sleep(utc_offset) else "Next update at", local_refresh_time.strftime("%H:%M on %e %b %Y"), ) -def gerty_should_sleep(timezone: str = "Europe/London"): - utc_now = pytz.utc.localize(datetime.utcnow()) - local_time = utc_now.astimezone(pytz.timezone(timezone)) +def gerty_should_sleep(utc_offset: int = 0): + utc_now = datetime.utcnow() + local_time = utc_now + timedelta(hours=utc_offset) hours = local_time.strftime("%H") hours = int(hours) logger.debug("HOURS") diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 9962a99d3..51800730e 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -129,14 +129,15 @@ async def api_gerty_json(gerty_id: str, p: int = None): # page number # get the sleep time sleep_time = gerty.refresh_time if gerty.refresh_time else 300 - if gerty_should_sleep(): + utc_offset = gerty.utc_offset if gerty.utc_offset else 0 + if gerty_should_sleep(utc_offset): sleep_time_hours = 8 sleep_time = 60 * 60 * sleep_time_hours return { "settings": { "refreshTime": sleep_time, - "requestTimestamp": get_next_update_time(sleep_time), + "requestTimestamp": get_next_update_time(sleep_time, utc_offset), "nextScreenNumber": next_screen_number, "showTextBoundRect": False, "name": gerty.name, diff --git a/poetry.lock b/poetry.lock index 8e9541fcc..d5e61070d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -664,14 +664,6 @@ python-versions = ">=3.5" [package.extras] cli 
= ["click (>=5.0)"] -[[package]] -name = "pytz" -version = "2022.4" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "PyYAML" version = "5.4.1" @@ -933,7 +925,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black ( [metadata] lock-version = "1.1" python-versions = "^3.10 | ^3.9 | ^3.8 | ^3.7" -content-hash = "401fa2739c9209df26cb1b2defaf90c5a4fcdafacc8eb2627f8d324857870281" +content-hash = "2db4d8b644c07a599b10ecdd1d532f8fce5dea7afa0332cbebc9a37223f79ed4" [metadata.files] aiofiles = [ @@ -1466,10 +1458,6 @@ python-dotenv = [ {file = "python-dotenv-0.19.0.tar.gz", hash = "sha256:f521bc2ac9a8e03c736f62911605c5d83970021e3fa95b37d769e2bbbe9b6172"}, {file = "python_dotenv-0.19.0-py2.py3-none-any.whl", hash = "sha256:aae25dc1ebe97c420f50b81fb0e5c949659af713f31fdb63c749ca68748f34b1"}, ] -pytz = [ - {file = "pytz-2022.4-py2.py3-none-any.whl", hash = "sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91"}, - {file = "pytz-2022.4.tar.gz", hash = "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174"}, -] PyYAML = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, diff --git a/pyproject.toml b/pyproject.toml index 0484ca9e8..e95c6a2ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,6 @@ zipp = "3.5.0" loguru = "0.5.3" cffi = "1.15.0" websocket-client = "1.3.3" -pytz = "^2022.4" [tool.poetry.dev-dependencies] isort = "^5.10.1" diff --git a/requirements.txt b/requirements.txt index fd213cebf..697ea1d4d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -33,7 +33,6 @@ pyparsing==3.0.9 pypng==0.20220715.0 pyqrcode==1.2.1 pyscss==1.4.0 -pytz=2022.4 python-dotenv==0.20.0 pyyaml==6.0 
represent==1.6.0.post0 From aa33f74f5d70284fdaccc3ee1d45b588a2afef88 Mon Sep 17 00:00:00 2001 From: Black Coffee Date: Thu, 20 Oct 2022 17:20:47 +0100 Subject: [PATCH 52/57] Bug fix wwhen trying to get a gerty screen that doesnt exist --- lnbits/extensions/gerty/views_api.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lnbits/extensions/gerty/views_api.py b/lnbits/extensions/gerty/views_api.py index 51800730e..05a7f5d75 100644 --- a/lnbits/extensions/gerty/views_api.py +++ b/lnbits/extensions/gerty/views_api.py @@ -153,7 +153,10 @@ async def api_gerty_json(gerty_id: str, p: int = None): # page number # Get a screen slug by its position in the screens_list def get_screen_slug_by_index(index: int, screens_list): - return list(screens_list)[index] + if(index < len(screens_list) - 1): + return list(screens_list)[index] + else: + return None # Get a list of text items for the screen number From 3814989b3a142baaea1d010f7022882b72280f2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20Pha=CC=A3m-Bachelart?= Date: Mon, 24 Oct 2022 11:46:32 +0200 Subject: [PATCH 53/57] Add cliche wallet (#1071) --- docs/guide/wallets.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/guide/wallets.md b/docs/guide/wallets.md index 80fb54c04..10724f34b 100644 --- a/docs/guide/wallets.md +++ b/docs/guide/wallets.md @@ -79,3 +79,8 @@ For the invoice to work you must have a publicly accessible URL in your LNbits. 
- `LNBITS_BACKEND_WALLET_CLASS`: **OpenNodeWallet** - `OPENNODE_API_ENDPOINT`: https://api.opennode.com/ - `OPENNODE_KEY`: opennodeAdminApiKey + + +### Cliche Wallet + +- `CLICHE_ENDPOINT`: ws://127.0.0.1:12000 From fc1a87f617a60de29552c8196a7f08050bb60ce5 Mon Sep 17 00:00:00 2001 From: Aaron Dewes Date: Mon, 24 Oct 2022 11:50:52 +0200 Subject: [PATCH 54/57] Remove unused proxy_fix (#1007) --- lnbits/app.py | 1 - lnbits/proxy_fix.py | 95 --------------------------------------------- 2 files changed, 96 deletions(-) delete mode 100644 lnbits/proxy_fix.py diff --git a/lnbits/app.py b/lnbits/app.py index 8b9cf7985..075828ef0 100644 --- a/lnbits/app.py +++ b/lnbits/app.py @@ -91,7 +91,6 @@ def create_app(config_object="lnbits.settings") -> FastAPI: ) app.add_middleware(GZipMiddleware, minimum_size=1000) - # app.add_middleware(ASGIProxyFix) check_funding_source(app) register_assets(app) diff --git a/lnbits/proxy_fix.py b/lnbits/proxy_fix.py deleted file mode 100644 index 897835e0d..000000000 --- a/lnbits/proxy_fix.py +++ /dev/null @@ -1,95 +0,0 @@ -from functools import partial -from typing import Callable, List, Optional -from urllib.parse import urlparse -from urllib.request import parse_http_list as _parse_list_header - -from quart import Request -from quart_trio.asgi import TrioASGIHTTPConnection -from werkzeug.datastructures import Headers - - -class ASGIProxyFix(TrioASGIHTTPConnection): - def _create_request_from_scope(self, send: Callable) -> Request: - headers = Headers() - headers["Remote-Addr"] = (self.scope.get("client") or [""])[0] - for name, value in self.scope["headers"]: - headers.add(name.decode("latin1").title(), value.decode("latin1")) - if self.scope["http_version"] < "1.1": - headers.setdefault("Host", self.app.config["SERVER_NAME"] or "") - - path = self.scope["path"] - path = path if path[0] == "/" else urlparse(path).path - - x_proto = self._get_real_value(1, headers.get("X-Forwarded-Proto")) - if x_proto: - self.scope["scheme"] = x_proto - - 
x_host = self._get_real_value(1, headers.get("X-Forwarded-Host")) - if x_host: - headers["host"] = x_host.lower() - - return self.app.request_class( - self.scope["method"], - self.scope["scheme"], - path, - self.scope["query_string"], - headers, - self.scope.get("root_path", ""), - self.scope["http_version"], - max_content_length=self.app.config["MAX_CONTENT_LENGTH"], - body_timeout=self.app.config["BODY_TIMEOUT"], - send_push_promise=partial(self._send_push_promise, send), - scope=self.scope, - ) - - def _get_real_value(self, trusted: int, value: Optional[str]) -> Optional[str]: - """Get the real value from a list header based on the configured - number of trusted proxies. - :param trusted: Number of values to trust in the header. - :param value: Comma separated list header value to parse. - :return: The real value, or ``None`` if there are fewer values - than the number of trusted proxies. - .. versionchanged:: 1.0 - Renamed from ``_get_trusted_comma``. - .. versionadded:: 0.15 - """ - if not (trusted and value): - return None - - values = self.parse_list_header(value) - if len(values) >= trusted: - return values[-trusted] - - return None - - def parse_list_header(self, value: str) -> List[str]: - result = [] - for item in _parse_list_header(value): - if item[:1] == item[-1:] == '"': - item = self.unquote_header_value(item[1:-1]) - result.append(item) - return result - - def unquote_header_value(self, value: str, is_filename: bool = False) -> str: - r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). - This does not use the real unquoting but what browsers are actually - using for quoting. - .. versionadded:: 0.5 - :param value: the header value to unquote. - :param is_filename: The value represents a filename or path. - """ - if value and value[0] == value[-1] == '"': - # this is not the real unquoting, but fixing this so that the - # RFC is met will result in bugs with internet explorer and - # probably some other browsers as well. 
IE for example is - # uploading files with "C:\foo\bar.txt" as filename - value = value[1:-1] - - # if this is a filename and the starting characters look like - # a UNC path, then just return the value without quotes. Using the - # replace sequence below on a UNC path has the effect of turning - # the leading double slash into a single slash and then - # _fix_ie_filename() doesn't work correctly. See #458. - if not is_filename or value[:2] != "\\\\": - return value.replace("\\\\", "\\").replace('\\"', '"') - return value From 193d037ad6aa102152a824611c65a96a6ff876df Mon Sep 17 00:00:00 2001 From: Anton Kovalenko Date: Mon, 24 Oct 2022 13:23:11 +0300 Subject: [PATCH 55/57] Allow more than 2-nd level domain in incoming ln-addresses (#914) --- lnbits/core/views/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lnbits/core/views/api.py b/lnbits/core/views/api.py index c07df568a..983d5a261 100644 --- a/lnbits/core/views/api.py +++ b/lnbits/core/views/api.py @@ -476,7 +476,7 @@ async def api_lnurlscan(code: str, wallet: WalletTypeInfo = Depends(get_key_type except: # parse internet identifier (user@domain.com) name_domain = code.split("@") - if len(name_domain) == 2 and len(name_domain[1].split(".")) == 2: + if len(name_domain) == 2 and len(name_domain[1].split(".")) >= 2: name, domain = name_domain url = ( ("http://" if domain.endswith(".onion") else "https://") From 09cf654427d8f42baee484a5146a180b1261b1e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?dni=20=E2=9A=A1?= Date: Mon, 24 Oct 2022 12:32:20 +0200 Subject: [PATCH 56/57] Proper Error Handling for qrcode-stream components (#1076) * proper errorhandling for camera * fix javascript for calle --- lnbits/core/static/js/wallet.js | 29 ++++++++++++++++++++++++++ lnbits/core/templates/core/wallet.html | 2 ++ 2 files changed, 31 insertions(+) diff --git a/lnbits/core/static/js/wallet.js b/lnbits/core/static/js/wallet.js index 76d82ad4c..668013135 100644 --- a/lnbits/core/static/js/wallet.js +++ 
b/lnbits/core/static/js/wallet.js @@ -361,6 +361,35 @@ new Vue({ this.receive.status = 'pending' }) }, + onInitQR: async function (promise) { + try { + await promise + } catch (error) { + let mapping = { + NotAllowedError: 'ERROR: you need to grant camera access permission', + NotFoundError: 'ERROR: no camera on this device', + NotSupportedError: + 'ERROR: secure context required (HTTPS, localhost)', + NotReadableError: 'ERROR: is the camera already in use?', + OverconstrainedError: 'ERROR: installed cameras are not suitable', + StreamApiNotSupportedError: + 'ERROR: Stream API is not supported in this browser', + InsecureContextError: + 'ERROR: Camera access is only permitted in secure context. Use HTTPS or localhost rather than HTTP.' + } + let valid_error = Object.keys(mapping).filter(key => { + return error.name === key + }) + let camera_error = valid_error + ? mapping[valid_error] + : `ERROR: Camera error (${error.name})` + this.parse.camera.show = false + this.$q.notify({ + message: camera_error, + type: 'negative' + }) + } + }, decodeQR: function (res) { this.parse.data.request = res this.decodeRequest() diff --git a/lnbits/core/templates/core/wallet.html b/lnbits/core/templates/core/wallet.html index bccdc2b48..4bf6067c0 100644 --- a/lnbits/core/templates/core/wallet.html +++ b/lnbits/core/templates/core/wallet.html @@ -653,6 +653,7 @@ @@ -671,6 +672,7 @@
From 09871bbabc27b69886d56770178b19f445b7c48f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?dni=20=E2=9A=A1?= Date: Mon, 24 Oct 2022 16:29:30 +0200 Subject: [PATCH 57/57] fix mypy for extensions (#873) * explicitly exclude all extensions from mypy * fix example extension mypy * fix subdomains extension mypy + 1 type error fixed * fix mypy discordbot * mypy check copilot extensnion * copilot black * add invoices ext to ignore * add boltz and boltcard * copilit id is necessary * was discordbot is ok Co-authored-by: dni --- lnbits/extensions/copilot/crud.py | 8 ++--- lnbits/extensions/copilot/tasks.py | 20 ++++++------ lnbits/extensions/copilot/views.py | 8 +++-- lnbits/extensions/copilot/views_api.py | 9 +++--- lnbits/extensions/discordbot/crud.py | 6 ++-- lnbits/extensions/discordbot/views.py | 4 ++- lnbits/extensions/discordbot/views_api.py | 39 +++++++++++++---------- lnbits/extensions/example/views.py | 5 ++- lnbits/extensions/subdomains/crud.py | 4 +-- lnbits/extensions/subdomains/models.py | 30 ++++++++--------- lnbits/extensions/subdomains/tasks.py | 4 +-- lnbits/extensions/subdomains/views.py | 4 ++- lnbits/extensions/subdomains/views_api.py | 25 +++++++++++---- pyproject.toml | 30 +++++++++++++++-- 14 files changed, 125 insertions(+), 71 deletions(-) diff --git a/lnbits/extensions/copilot/crud.py b/lnbits/extensions/copilot/crud.py index d0da044eb..5ecb5cd45 100644 --- a/lnbits/extensions/copilot/crud.py +++ b/lnbits/extensions/copilot/crud.py @@ -10,7 +10,7 @@ from .models import Copilots, CreateCopilotData async def create_copilot( data: CreateCopilotData, inkey: Optional[str] = "" -) -> Copilots: +) -> Optional[Copilots]: copilot_id = urlsafe_short_hash() await db.execute( """ @@ -67,19 +67,19 @@ async def create_copilot( async def update_copilot( - data: CreateCopilotData, copilot_id: Optional[str] = "" + data: CreateCopilotData, copilot_id: str ) -> Optional[Copilots]: q = ", ".join([f"{field[0]} = ?" 
for field in data]) items = [f"{field[1]}" for field in data] items.append(copilot_id) - await db.execute(f"UPDATE copilot.newer_copilots SET {q} WHERE id = ?", (items)) + await db.execute(f"UPDATE copilot.newer_copilots SET {q} WHERE id = ?", (items,)) row = await db.fetchone( "SELECT * FROM copilot.newer_copilots WHERE id = ?", (copilot_id,) ) return Copilots(**row) if row else None -async def get_copilot(copilot_id: str) -> Copilots: +async def get_copilot(copilot_id: str) -> Optional[Copilots]: row = await db.fetchone( "SELECT * FROM copilot.newer_copilots WHERE id = ?", (copilot_id,) ) diff --git a/lnbits/extensions/copilot/tasks.py b/lnbits/extensions/copilot/tasks.py index c59ef4cc8..48ad7813d 100644 --- a/lnbits/extensions/copilot/tasks.py +++ b/lnbits/extensions/copilot/tasks.py @@ -26,7 +26,7 @@ async def wait_for_paid_invoices(): async def on_invoice_paid(payment: Payment) -> None: webhook = None data = None - if payment.extra.get("tag") != "copilot": + if not payment.extra or payment.extra.get("tag") != "copilot": # not an copilot invoice return @@ -71,12 +71,12 @@ async def on_invoice_paid(payment: Payment) -> None: async def mark_webhook_sent(payment: Payment, status: int) -> None: - payment.extra["wh_status"] = status - - await core_db.execute( - """ - UPDATE apipayments SET extra = ? - WHERE hash = ? - """, - (json.dumps(payment.extra), payment.payment_hash), - ) + if payment.extra: + payment.extra["wh_status"] = status + await core_db.execute( + """ + UPDATE apipayments SET extra = ? + WHERE hash = ? 
+ """, + (json.dumps(payment.extra), payment.payment_hash), + ) diff --git a/lnbits/extensions/copilot/views.py b/lnbits/extensions/copilot/views.py index 7ee7f590e..b4a2354a0 100644 --- a/lnbits/extensions/copilot/views.py +++ b/lnbits/extensions/copilot/views.py @@ -15,7 +15,9 @@ templates = Jinja2Templates(directory="templates") @copilot_ext.get("/", response_class=HTMLResponse) -async def index(request: Request, user: User = Depends(check_user_exists)): +async def index( + request: Request, user: User = Depends(check_user_exists) # type: ignore +): return copilot_renderer().TemplateResponse( "copilot/index.html", {"request": request, "user": user.dict()} ) @@ -44,7 +46,7 @@ class ConnectionManager: async def connect(self, websocket: WebSocket, copilot_id: str): await websocket.accept() - websocket.id = copilot_id + websocket.id = copilot_id # type: ignore self.active_connections.append(websocket) def disconnect(self, websocket: WebSocket): @@ -52,7 +54,7 @@ class ConnectionManager: async def send_personal_message(self, message: str, copilot_id: str): for connection in self.active_connections: - if connection.id == copilot_id: + if connection.id == copilot_id: # type: ignore await connection.send_text(message) async def broadcast(self, message: str): diff --git a/lnbits/extensions/copilot/views_api.py b/lnbits/extensions/copilot/views_api.py index 91b0572a5..46611a2ea 100644 --- a/lnbits/extensions/copilot/views_api.py +++ b/lnbits/extensions/copilot/views_api.py @@ -23,7 +23,7 @@ from .views import updater @copilot_ext.get("/api/v1/copilot") async def api_copilots_retrieve( - req: Request, wallet: WalletTypeInfo = Depends(get_key_type) + req: Request, wallet: WalletTypeInfo = Depends(get_key_type) # type: ignore ): wallet_user = wallet.wallet.user copilots = [copilot.dict() for copilot in await get_copilots(wallet_user)] @@ -37,7 +37,7 @@ async def api_copilots_retrieve( async def api_copilot_retrieve( req: Request, copilot_id: str = Query(None), - wallet: 
WalletTypeInfo = Depends(get_key_type), + wallet: WalletTypeInfo = Depends(get_key_type), # type: ignore ): copilot = await get_copilot(copilot_id) if not copilot: @@ -54,7 +54,7 @@ async def api_copilot_retrieve( async def api_copilot_create_or_update( data: CreateCopilotData, copilot_id: str = Query(None), - wallet: WalletTypeInfo = Depends(require_admin_key), + wallet: WalletTypeInfo = Depends(require_admin_key), # type: ignore ): data.user = wallet.wallet.user data.wallet = wallet.wallet.id @@ -67,7 +67,8 @@ async def api_copilot_create_or_update( @copilot_ext.delete("/api/v1/copilot/{copilot_id}") async def api_copilot_delete( - copilot_id: str = Query(None), wallet: WalletTypeInfo = Depends(require_admin_key) + copilot_id: str = Query(None), + wallet: WalletTypeInfo = Depends(require_admin_key), # type: ignore ): copilot = await get_copilot(copilot_id) diff --git a/lnbits/extensions/discordbot/crud.py b/lnbits/extensions/discordbot/crud.py index 5661fcb4d..629a5c004 100644 --- a/lnbits/extensions/discordbot/crud.py +++ b/lnbits/extensions/discordbot/crud.py @@ -98,21 +98,21 @@ async def get_discordbot_wallet(wallet_id: str) -> Optional[Wallets]: return Wallets(**row) if row else None -async def get_discordbot_wallets(admin_id: str) -> Optional[Wallets]: +async def get_discordbot_wallets(admin_id: str) -> List[Wallets]: rows = await db.fetchall( "SELECT * FROM discordbot.wallets WHERE admin = ?", (admin_id,) ) return [Wallets(**row) for row in rows] -async def get_discordbot_users_wallets(user_id: str) -> Optional[Wallets]: +async def get_discordbot_users_wallets(user_id: str) -> List[Wallets]: rows = await db.fetchall( """SELECT * FROM discordbot.wallets WHERE "user" = ?""", (user_id,) ) return [Wallets(**row) for row in rows] -async def get_discordbot_wallet_transactions(wallet_id: str) -> Optional[Payment]: +async def get_discordbot_wallet_transactions(wallet_id: str) -> List[Payment]: return await get_payments( wallet_id=wallet_id, complete=True, 
pending=False, outgoing=True, incoming=True ) diff --git a/lnbits/extensions/discordbot/views.py b/lnbits/extensions/discordbot/views.py index a5395e21f..ec7d18cc3 100644 --- a/lnbits/extensions/discordbot/views.py +++ b/lnbits/extensions/discordbot/views.py @@ -9,7 +9,9 @@ from . import discordbot_ext, discordbot_renderer @discordbot_ext.get("/", response_class=HTMLResponse) -async def index(request: Request, user: User = Depends(check_user_exists)): +async def index( + request: Request, user: User = Depends(check_user_exists) # type: ignore +): return discordbot_renderer().TemplateResponse( "discordbot/index.html", {"request": request, "user": user.dict()} ) diff --git a/lnbits/extensions/discordbot/views_api.py b/lnbits/extensions/discordbot/views_api.py index 6f213a89a..e6d004dbb 100644 --- a/lnbits/extensions/discordbot/views_api.py +++ b/lnbits/extensions/discordbot/views_api.py @@ -27,32 +27,37 @@ from .models import CreateUserData, CreateUserWallet @discordbot_ext.get("/api/v1/users", status_code=HTTPStatus.OK) -async def api_discordbot_users(wallet: WalletTypeInfo = Depends(get_key_type)): +async def api_discordbot_users( + wallet: WalletTypeInfo = Depends(get_key_type), # type: ignore +): user_id = wallet.wallet.user return [user.dict() for user in await get_discordbot_users(user_id)] @discordbot_ext.get("/api/v1/users/{user_id}", status_code=HTTPStatus.OK) -async def api_discordbot_user(user_id, wallet: WalletTypeInfo = Depends(get_key_type)): +async def api_discordbot_user( + user_id, wallet: WalletTypeInfo = Depends(get_key_type) # type: ignore +): user = await get_discordbot_user(user_id) - return user.dict() + if user: + return user.dict() @discordbot_ext.post("/api/v1/users", status_code=HTTPStatus.CREATED) async def api_discordbot_users_create( - data: CreateUserData, wallet: WalletTypeInfo = Depends(get_key_type) + data: CreateUserData, wallet: WalletTypeInfo = Depends(get_key_type) # type: ignore ): user = await create_discordbot_user(data) full 
= user.dict() - full["wallets"] = [ - wallet.dict() for wallet in await get_discordbot_users_wallets(user.id) - ] + wallets = await get_discordbot_users_wallets(user.id) + if wallets: + full["wallets"] = [wallet for wallet in wallets] return full @discordbot_ext.delete("/api/v1/users/{user_id}") async def api_discordbot_users_delete( - user_id, wallet: WalletTypeInfo = Depends(get_key_type) + user_id, wallet: WalletTypeInfo = Depends(get_key_type) # type: ignore ): user = await get_discordbot_user(user_id) if not user: @@ -75,7 +80,7 @@ async def api_discordbot_activate_extension( raise HTTPException( status_code=HTTPStatus.NOT_FOUND, detail="User does not exist." ) - update_user_extension(user_id=userid, extension=extension, active=active) + await update_user_extension(user_id=userid, extension=extension, active=active) return {"extension": "updated"} @@ -84,7 +89,7 @@ async def api_discordbot_activate_extension( @discordbot_ext.post("/api/v1/wallets") async def api_discordbot_wallets_create( - data: CreateUserWallet, wallet: WalletTypeInfo = Depends(get_key_type) + data: CreateUserWallet, wallet: WalletTypeInfo = Depends(get_key_type) # type: ignore ): user = await create_discordbot_wallet( user_id=data.user_id, wallet_name=data.wallet_name, admin_id=data.admin_id @@ -93,28 +98,30 @@ async def api_discordbot_wallets_create( @discordbot_ext.get("/api/v1/wallets") -async def api_discordbot_wallets(wallet: WalletTypeInfo = Depends(get_key_type)): +async def api_discordbot_wallets( + wallet: WalletTypeInfo = Depends(get_key_type), # type: ignore +): admin_id = wallet.wallet.user - return [wallet.dict() for wallet in await get_discordbot_wallets(admin_id)] + return await get_discordbot_wallets(admin_id) @discordbot_ext.get("/api/v1/transactions/{wallet_id}") async def api_discordbot_wallet_transactions( - wallet_id, wallet: WalletTypeInfo = Depends(get_key_type) + wallet_id, wallet: WalletTypeInfo = Depends(get_key_type) # type: ignore ): return await 
get_discordbot_wallet_transactions(wallet_id) @discordbot_ext.get("/api/v1/wallets/{user_id}") async def api_discordbot_users_wallets( - user_id, wallet: WalletTypeInfo = Depends(get_key_type) + user_id, wallet: WalletTypeInfo = Depends(get_key_type) # type: ignore ): - return [s_wallet.dict() for s_wallet in await get_discordbot_users_wallets(user_id)] + return await get_discordbot_users_wallets(user_id) @discordbot_ext.delete("/api/v1/wallets/{wallet_id}") async def api_discordbot_wallets_delete( - wallet_id, wallet: WalletTypeInfo = Depends(get_key_type) + wallet_id, wallet: WalletTypeInfo = Depends(get_key_type) # type: ignore ): get_wallet = await get_discordbot_wallet(wallet_id) if not get_wallet: diff --git a/lnbits/extensions/example/views.py b/lnbits/extensions/example/views.py index 252b47263..29b257f45 100644 --- a/lnbits/extensions/example/views.py +++ b/lnbits/extensions/example/views.py @@ -12,7 +12,10 @@ templates = Jinja2Templates(directory="templates") @example_ext.get("/", response_class=HTMLResponse) -async def index(request: Request, user: User = Depends(check_user_exists)): +async def index( + request: Request, + user: User = Depends(check_user_exists), # type: ignore +): return example_renderer().TemplateResponse( "example/index.html", {"request": request, "user": user.dict()} ) diff --git a/lnbits/extensions/subdomains/crud.py b/lnbits/extensions/subdomains/crud.py index 207e2d1d4..aa358d11c 100644 --- a/lnbits/extensions/subdomains/crud.py +++ b/lnbits/extensions/subdomains/crud.py @@ -3,10 +3,10 @@ from typing import List, Optional, Union from lnbits.helpers import urlsafe_short_hash from . 
import db -from .models import CreateDomain, Domains, Subdomains +from .models import CreateDomain, CreateSubdomain, Domains, Subdomains -async def create_subdomain(payment_hash, wallet, data: CreateDomain) -> Subdomains: +async def create_subdomain(payment_hash, wallet, data: CreateSubdomain) -> Subdomains: await db.execute( """ INSERT INTO subdomains.subdomain (id, domain, email, subdomain, ip, wallet, sats, duration, paid, record_type) diff --git a/lnbits/extensions/subdomains/models.py b/lnbits/extensions/subdomains/models.py index 170045040..39e176155 100644 --- a/lnbits/extensions/subdomains/models.py +++ b/lnbits/extensions/subdomains/models.py @@ -3,24 +3,24 @@ from pydantic.main import BaseModel class CreateDomain(BaseModel): - wallet: str = Query(...) - domain: str = Query(...) - cf_token: str = Query(...) - cf_zone_id: str = Query(...) - webhook: str = Query("") - description: str = Query(..., min_length=0) - cost: int = Query(..., ge=0) - allowed_record_types: str = Query(...) + wallet: str = Query(...) # type: ignore + domain: str = Query(...) # type: ignore + cf_token: str = Query(...) # type: ignore + cf_zone_id: str = Query(...) # type: ignore + webhook: str = Query("") # type: ignore + description: str = Query(..., min_length=0) # type: ignore + cost: int = Query(..., ge=0) # type: ignore + allowed_record_types: str = Query(...) # type: ignore class CreateSubdomain(BaseModel): - domain: str = Query(...) - subdomain: str = Query(...) - email: str = Query(...) - ip: str = Query(...) - sats: int = Query(..., ge=0) - duration: int = Query(...) - record_type: str = Query(...) + domain: str = Query(...) # type: ignore + subdomain: str = Query(...) # type: ignore + email: str = Query(...) # type: ignore + ip: str = Query(...) # type: ignore + sats: int = Query(..., ge=0) # type: ignore + duration: int = Query(...) # type: ignore + record_type: str = Query(...) 
# type: ignore class Domains(BaseModel): diff --git a/lnbits/extensions/subdomains/tasks.py b/lnbits/extensions/subdomains/tasks.py index 04ee2dd48..c5a7f47ba 100644 --- a/lnbits/extensions/subdomains/tasks.py +++ b/lnbits/extensions/subdomains/tasks.py @@ -20,7 +20,7 @@ async def wait_for_paid_invoices(): async def on_invoice_paid(payment: Payment) -> None: - if payment.extra.get("tag") != "lnsubdomain": + if not payment.extra or payment.extra.get("tag") != "lnsubdomain": # not an lnurlp invoice return @@ -37,7 +37,7 @@ async def on_invoice_paid(payment: Payment) -> None: ) ### Use webhook to notify about cloudflare registration - if domain.webhook: + if domain and domain.webhook: async with httpx.AsyncClient() as client: try: r = await client.post( diff --git a/lnbits/extensions/subdomains/views.py b/lnbits/extensions/subdomains/views.py index df387ba8c..962f850d0 100644 --- a/lnbits/extensions/subdomains/views.py +++ b/lnbits/extensions/subdomains/views.py @@ -16,7 +16,9 @@ templates = Jinja2Templates(directory="templates") @subdomains_ext.get("/", response_class=HTMLResponse) -async def index(request: Request, user: User = Depends(check_user_exists)): +async def index( + request: Request, user: User = Depends(check_user_exists) # type:ignore +): return subdomains_renderer().TemplateResponse( "subdomains/index.html", {"request": request, "user": user.dict()} ) diff --git a/lnbits/extensions/subdomains/views_api.py b/lnbits/extensions/subdomains/views_api.py index b01e6ffbb..34d8e75be 100644 --- a/lnbits/extensions/subdomains/views_api.py +++ b/lnbits/extensions/subdomains/views_api.py @@ -29,12 +29,15 @@ from .crud import ( @subdomains_ext.get("/api/v1/domains") async def api_domains( - g: WalletTypeInfo = Depends(get_key_type), all_wallets: bool = Query(False) + g: WalletTypeInfo = Depends(get_key_type), # type: ignore + all_wallets: bool = Query(False), ): wallet_ids = [g.wallet.id] if all_wallets: - wallet_ids = (await get_user(g.wallet.user)).wallet_ids + 
user = await get_user(g.wallet.user) + if user is not None: + wallet_ids = user.wallet_ids return [domain.dict() for domain in await get_domains(wallet_ids)] @@ -42,7 +45,9 @@ async def api_domains( @subdomains_ext.post("/api/v1/domains") @subdomains_ext.put("/api/v1/domains/{domain_id}") async def api_domain_create( - data: CreateDomain, domain_id=None, g: WalletTypeInfo = Depends(get_key_type) + data: CreateDomain, + domain_id=None, + g: WalletTypeInfo = Depends(get_key_type), # type: ignore ): if domain_id: domain = await get_domain(domain_id) @@ -63,7 +68,9 @@ async def api_domain_create( @subdomains_ext.delete("/api/v1/domains/{domain_id}") -async def api_domain_delete(domain_id, g: WalletTypeInfo = Depends(get_key_type)): +async def api_domain_delete( + domain_id, g: WalletTypeInfo = Depends(get_key_type) # type: ignore +): domain = await get_domain(domain_id) if not domain: @@ -82,12 +89,14 @@ async def api_domain_delete(domain_id, g: WalletTypeInfo = Depends(get_key_type) @subdomains_ext.get("/api/v1/subdomains") async def api_subdomains( - all_wallets: bool = Query(False), g: WalletTypeInfo = Depends(get_key_type) + all_wallets: bool = Query(False), g: WalletTypeInfo = Depends(get_key_type) # type: ignore ): wallet_ids = [g.wallet.id] if all_wallets: - wallet_ids = (await get_user(g.wallet.user)).wallet_ids + user = await get_user(g.wallet.user) + if user is not None: + wallet_ids = user.wallet_ids return [domain.dict() for domain in await get_subdomains(wallet_ids)] @@ -173,7 +182,9 @@ async def api_subdomain_send_subdomain(payment_hash): @subdomains_ext.delete("/api/v1/subdomains/{subdomain_id}") -async def api_subdomain_delete(subdomain_id, g: WalletTypeInfo = Depends(get_key_type)): +async def api_subdomain_delete( + subdomain_id, g: WalletTypeInfo = Depends(get_key_type) # type: ignore +): subdomain = await get_subdomain(subdomain_id) if not subdomain: diff --git a/pyproject.toml b/pyproject.toml index 19dac8600..7418de272 100644 --- a/pyproject.toml 
+++ b/pyproject.toml @@ -89,8 +89,34 @@ profile = "black" ignore_missing_imports = "True" files = "lnbits" exclude = """(?x)( - ^lnbits/extensions. - | ^lnbits/wallets/lnd_grpc_files. + ^lnbits/extensions/bleskomat. + | ^lnbits/extensions/boltz. + | ^lnbits/extensions/boltcards. + | ^lnbits/extensions/events. + | ^lnbits/extensions/hivemind. + | ^lnbits/extensions/invoices. + | ^lnbits/extensions/jukebox. + | ^lnbits/extensions/livestream. + | ^lnbits/extensions/lnaddress. + | ^lnbits/extensions/lndhub. + | ^lnbits/extensions/lnticket. + | ^lnbits/extensions/lnurldevice. + | ^lnbits/extensions/lnurlp. + | ^lnbits/extensions/lnurlpayout. + | ^lnbits/extensions/ngrok. + | ^lnbits/extensions/offlineshop. + | ^lnbits/extensions/paywall. + | ^lnbits/extensions/satsdice. + | ^lnbits/extensions/satspay. + | ^lnbits/extensions/scrub. + | ^lnbits/extensions/splitpayments. + | ^lnbits/extensions/streamalerts. + | ^lnbits/extensions/tipjar. + | ^lnbits/extensions/tpos. + | ^lnbits/extensions/usermanager. + | ^lnbits/extensions/watchonly. + | ^lnbits/extensions/withdraw. + | ^lnbits/wallets/lnd_grpc_files. )""" [tool.pytest.ini_options]