diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 02c7785c..d7105d15 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1 +1 @@
-* @jbonzo @mmoghaddam385 @justinpolygon
+* @justinpolygon @penelopus @davidwf-polygonio
diff --git a/.polygon/rest.json b/.polygon/rest.json
index 94dd8286..425d2c2d 100644
--- a/.polygon/rest.json
+++ b/.polygon/rest.json
@@ -14644,7 +14644,7 @@
"volume": 37
},
"ticker": "NCLH",
- "type": "stock"
+ "type": "stocks"
},
{
"last_updated": 1679597116344223500,
@@ -14895,6 +14895,16 @@
"format": "double",
"type": "number"
},
+ "regular_trading_change": {
+          "description": "Today's change during regular trading hours. While the market is open, this is the difference between the current price and the previous trading day's close; otherwise, it is the difference between today's close and the previous day's close.",
+ "format": "double",
+ "type": "number"
+ },
+ "regular_trading_change_percent": {
+ "description": "Today's regular trading change as a percentage.",
+ "format": "double",
+ "type": "number"
+ },
"volume": {
"description": "The trading volume for the asset for the day.",
"format": "double",
@@ -14931,15 +14941,7 @@
}
},
"required": [
- "ticker",
- "name",
- "price",
- "branding",
- "market_status",
- "type",
- "session",
- "options",
- "last_updated"
+ "ticker"
],
"type": "object",
"x-polygon-go-type": {
@@ -18576,6 +18578,11 @@
"properties": {
"sentiment": {
"description": "The sentiment of the insight.",
+ "enum": [
+ "positive",
+ "neutral",
+ "negative"
+ ],
"type": "string"
},
"sentiment_reasoning": {
@@ -24328,7 +24335,7 @@
"operationId": "ListDividends",
"parameters": [
{
- "description": "Return the dividends that contain this ticker.",
+ "description": "Specify a case-sensitive ticker symbol. For example, AAPL represents Apple Inc.",
"in": "query",
"name": "ticker",
"schema": {
@@ -24708,6 +24715,7 @@
"dividend_type": "CD",
"ex_dividend_date": "2021-11-05",
"frequency": 4,
+ "id": "E8e3c4f794613e9205e2f178a36c53fcc57cdabb55e1988c87b33f9e52e221444",
"pay_date": "2021-11-11",
"record_date": "2021-11-08",
"ticker": "AAPL"
@@ -24718,6 +24726,7 @@
"dividend_type": "CD",
"ex_dividend_date": "2021-08-06",
"frequency": 4,
+ "id": "E6436c5475706773f03490acf0b63fdb90b2c72bfeed329a6eb4afc080acd80ae",
"pay_date": "2021-08-12",
"record_date": "2021-08-09",
"ticker": "AAPL"
@@ -24806,6 +24815,10 @@
]
}
},
+ "id": {
+ "description": "The unique identifier of the dividend.",
+ "type": "string"
+ },
"pay_date": {
"description": "The date that the dividend is paid out.",
"type": "string"
@@ -24832,7 +24845,8 @@
"ex_dividend_date",
"frequency",
"cash_amount",
- "dividend_type"
+ "dividend_type",
+ "id"
],
"type": "object",
"x-polygon-go-struct-tags": {
@@ -25841,12 +25855,14 @@
"results": [
{
"execution_date": "2020-08-31",
+ "id": "E36416cce743c3964c5da63e1ef1626c0aece30fb47302eea5a49c0055c04e8d0",
"split_from": 1,
"split_to": 4,
"ticker": "AAPL"
},
{
"execution_date": "2005-02-28",
+ "id": "E90a77bdf742661741ed7c8fc086415f0457c2816c45899d73aaa88bdc8ff6025",
"split_from": 1,
"split_to": 2,
"ticker": "AAPL"
@@ -25870,6 +25886,10 @@
"description": "The execution date of the stock split. On this date the stock split was applied.",
"type": "string"
},
+ "id": {
+ "description": "The unique identifier for this stock split.",
+ "type": "string"
+ },
"split_from": {
"description": "The second number in the split ratio.\n\nFor example: In a 2-for-1 split, split_from would be 1.",
"format": "float",
@@ -25887,7 +25907,10 @@
},
"required": [
"split_from",
- "split_to"
+ "split_to",
+ "id",
+ "ticker",
+ "execution_date"
],
"type": "object"
},
@@ -27197,8 +27220,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ask",
"bid",
"last_updated",
@@ -27272,9 +27293,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
- "participant_timestamp",
"price",
"size"
],
@@ -27362,6 +27380,16 @@
"format": "double",
"type": "number"
},
+ "regular_trading_change": {
+          "description": "Today's change during regular trading hours. While the market is open, this is the difference between the current price and the previous trading day's close; otherwise, it is the difference between today's close and the previous day's close.",
+ "format": "double",
+ "type": "number"
+ },
+ "regular_trading_change_percent": {
+ "description": "Today's regular trading change as a percentage.",
+ "format": "double",
+ "type": "number"
+ },
"volume": {
"description": "The trading volume for the asset for the day.",
"format": "double",
@@ -27438,8 +27466,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ticker",
"change_to_break_even"
],
@@ -27753,9 +27779,7 @@
}
},
"required": [
- "ticker",
- "timeframe",
- "last_updated"
+ "ticker"
],
"type": "object",
"x-polygon-go-type": {
@@ -28014,7 +28038,8 @@
"bid": 120.28,
"bid_size": 8,
"last_updated": 1605195918507251700,
- "midpoint": 120.29
+ "midpoint": 120.29,
+ "timeframe": "REAL-TIME"
},
"last_trade": {
"conditions": [
@@ -28115,7 +28140,6 @@
}
},
"required": [
- "last_updated",
"open",
"high",
"low",
@@ -28289,8 +28313,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ask",
"ask_size",
"bid_size",
@@ -28343,7 +28365,6 @@
}
},
"required": [
- "timeframe",
"exchange",
"price",
"sip_timestamp",
@@ -28400,8 +28421,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ticker",
"change_to_break_even"
],
@@ -28416,7 +28435,6 @@
"last_quote",
"underlying_asset",
"details",
- "cha",
"break_even_price",
"open_interest"
],
@@ -28651,7 +28669,6 @@
}
},
"required": [
- "last_updated",
"open",
"high",
"low",
@@ -28825,8 +28842,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ask",
"ask_size",
"bid_size",
@@ -28879,7 +28894,6 @@
}
},
"required": [
- "timeframe",
"exchange",
"price",
"sip_timestamp",
@@ -28936,8 +28950,6 @@
}
},
"required": [
- "last_updated",
- "timeframe",
"ticker",
"change_to_break_even"
],
@@ -28952,7 +28964,6 @@
"last_quote",
"underlying_asset",
"details",
- "cha",
"break_even_price",
"open_interest"
],
@@ -30486,6 +30497,388 @@
}
}
},
+ "/vX/reference/ipos": {
+ "get": {
+ "description": "The IPOs API provides access to detailed information about Initial Public Offerings (IPOs), including both upcoming and historical events. With this API, you can query for a comprehensive list of IPOs, along with key details such as the issuer name, ticker symbol, ISIN, IPO date, number of shares offered, expected price range, and final offering price. You can filter the results by status to focus on new, rumors, pending, historical, and more.",
+ "operationId": "ListIPOs",
+ "parameters": [
+ {
+ "description": "Specify a case-sensitive ticker symbol. For example, AAPL represents Apple Inc.",
+ "in": "query",
+ "name": "ticker",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "Specify a us_code. This is a unique nine-character alphanumeric code that identifies a North American financial security for the purposes of facilitating clearing and settlement of trades.",
+ "in": "query",
+ "name": "us_code",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+          "description": "Specify an International Securities Identification Number (ISIN). This is a unique twelve-character alphanumeric code that is assigned to every security issuance in the world.",
+ "in": "query",
+ "name": "isin",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "description": "Specify a listing date. This is the first trading date for the newly listed entity.",
+ "in": "query",
+ "name": "listing_date",
+ "schema": {
+ "format": "date",
+ "type": "string"
+ },
+ "x-polygon-filter-field": {
+ "range": true,
+ "type": "string"
+ }
+ },
+ {
+ "description": "Specify an IPO status.",
+ "in": "query",
+ "name": "ipo_status",
+ "schema": {
+ "enum": [
+ "direct_listing_process",
+ "history",
+ "new",
+ "pending",
+ "postponed",
+ "rumor",
+ "withdrawn"
+ ],
+ "type": "string"
+ }
+ },
+ {
+ "description": "Range by listing_date.",
+ "in": "query",
+ "name": "listing_date.gte",
+ "schema": {
+ "format": "date",
+ "type": "string"
+ }
+ },
+ {
+ "description": "Range by listing_date.",
+ "in": "query",
+ "name": "listing_date.gt",
+ "schema": {
+ "format": "date",
+ "type": "string"
+ }
+ },
+ {
+ "description": "Range by listing_date.",
+ "in": "query",
+ "name": "listing_date.lte",
+ "schema": {
+ "format": "date",
+ "type": "string"
+ }
+ },
+ {
+ "description": "Range by listing_date.",
+ "in": "query",
+ "name": "listing_date.lt",
+ "schema": {
+ "format": "date",
+ "type": "string"
+ }
+ },
+ {
+ "description": "Order results based on the `sort` field.",
+ "in": "query",
+ "name": "order",
+ "schema": {
+ "default": "desc",
+ "enum": [
+ "asc",
+ "desc"
+ ],
+ "example": "asc",
+ "type": "string"
+ }
+ },
+ {
+ "description": "Limit the number of results returned, default is 10 and max is 1000.",
+ "in": "query",
+ "name": "limit",
+ "schema": {
+ "default": 10,
+ "example": 10,
+ "maximum": 1000,
+ "minimum": 1,
+ "type": "integer"
+ }
+ },
+ {
+ "description": "Sort field used for ordering.",
+ "in": "query",
+ "name": "sort",
+ "schema": {
+ "default": "listing_date",
+ "enum": [
+ "listing_date",
+ "ticker",
+ "last_updated",
+ "security_type",
+ "issuer_name",
+ "currency_code",
+ "isin",
+ "us_code",
+ "final_issue_price",
+ "min_shares_offered",
+ "max_shares_offered",
+ "lowest_offer_price",
+ "highest_offer_price",
+ "total_offer_size",
+ "shares_outstanding",
+ "primary_exchange",
+ "lot_size",
+ "security_description",
+ "ipo_status"
+ ],
+ "example": "listing_date",
+ "type": "string"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "content": {
+ "application/json": {
+ "example": {
+ "next_url": "https://api.polygon.io/vX/reference/ipos?cursor=YWN0aXZlPXRydWUmZGF0ZT0yMDIxLTA0LTI1JmxpbWl0PTEmb3JkZXI9YXNjJnBhZ2VfbWFya2VyPUElN0M5YWRjMjY0ZTgyM2E1ZjBiOGUyNDc5YmZiOGE1YmYwNDVkYzU0YjgwMDcyMWE2YmI1ZjBjMjQwMjU4MjFmNGZiJnNvcnQ9dGlja2Vy",
+ "request_id": "6a7e466379af0a71039d60cc78e72282",
+ "results": [
+ {
+ "currency_code": "USD",
+ "final_issue_price": 17,
+ "highest_offer_price": 17,
+ "ipo_status": "history",
+ "isin": "US75383L1026",
+ "issue_end_date": "2024-06-06",
+ "issue_start_date": "2024-06-01",
+ "issuer_name": "Rapport Therapeutics Inc.",
+ "last_updated": "2024-06-27",
+ "listing_date": "2024-06-07",
+ "lot_size": 100,
+ "lowest_offer_price": 17,
+ "max_shares_offered": 8000000,
+ "min_shares_offered": 1000000,
+ "primary_exchange": "XNAS",
+ "security_description": "Ordinary Shares",
+ "security_type": "CS",
+ "shares_outstanding": 35376457,
+ "ticker": "RAPP",
+ "total_offer_size": 136000000,
+ "us_code": "75383L102"
+ }
+ ],
+ "status": "OK"
+ },
+ "schema": {
+ "properties": {
+ "next_url": {
+ "description": "If present, this value can be used to fetch the next page of data.",
+ "type": "string"
+ },
+ "request_id": {
+ "description": "A request id assigned by the server.",
+ "type": "string"
+ },
+ "results": {
+ "description": "An array of results containing the requested data.",
+ "items": {
+ "properties": {
+ "currency_code": {
+ "description": "Underlying currency of the security.",
+ "example": "USD",
+ "type": "string"
+ },
+ "final_issue_price": {
+ "description": "The price set by the company and its underwriters before the IPO goes live.",
+ "example": 14.5,
+ "format": "float",
+ "type": "number"
+ },
+ "highest_offer_price": {
+ "description": "The highest price within the IPO price range that the company might use to price the shares.",
+ "example": 20,
+ "format": "float",
+ "type": "number"
+ },
+ "ipo_status": {
+ "description": "The status of the IPO event. IPO events start out as status \"rumor\" or \"pending\". On listing day, the status changes to \"new\". After the listing day, the status changes to \"history\".\n\nThe status \"direct_listing_process\" corresponds to a type of offering where, instead of going through all the IPO processes, the company decides to list its shares directly on an exchange, without using an investment bank or other intermediaries. This is called a direct listing, direct placement, or direct public offering (DPO).",
+ "enum": [
+ "direct_listing_process",
+ "history",
+ "new",
+ "pending",
+ "postponed",
+ "rumor",
+ "withdrawn"
+ ],
+ "example": "history",
+ "type": "string"
+ },
+ "isin": {
+                            "description": "International Securities Identification Number. This is a unique twelve-character alphanumeric code that is assigned to every security issuance in the world.",
+ "example": "US0378331005",
+ "type": "string"
+ },
+ "issuer_name": {
+ "description": "Name of issuer.",
+ "example": "Apple Inc.",
+ "type": "string"
+ },
+ "last_updated": {
+ "description": "The date when the IPO event was last modified.",
+ "example": "2023-01-02",
+ "format": "date",
+ "type": "string"
+ },
+ "listing_date": {
+ "description": "First trading date for the newly listed entity.",
+ "example": "2023-02-01",
+ "format": "date",
+ "type": "string"
+ },
+ "lot_size": {
+ "description": "The minimum number of shares that can be bought or sold in a single transaction.",
+ "example": 100,
+ "type": "number"
+ },
+ "lowest_offer_price": {
+ "description": "The lowest price within the IPO price range that the company is willing to offer its shares to investors.",
+ "example": 10,
+ "format": "float",
+ "type": "number"
+ },
+ "max_shares_offered": {
+ "description": "The upper limit of the shares that the company is offering to investors.",
+ "example": 1000,
+ "type": "number"
+ },
+ "min_shares_offered": {
+ "description": "The lower limit of shares that the company is willing to sell in the IPO.",
+ "example": 1000,
+ "type": "number"
+ },
+ "primary_exchange": {
+ "description": "Market Identifier Code (MIC) of the primary exchange where the security is listed. The Market Identifier Code (MIC) (ISO 10383) is a unique identification code used to identify securities trading exchanges, regulated and non-regulated trading markets.",
+ "example": "XNAS",
+ "type": "string"
+ },
+ "security_description": {
+ "description": "Description of the security.",
+ "example": "Ordinary Shares - Class A",
+ "type": "string"
+ },
+ "security_type": {
+ "description": "The classification of the stock. For example, \"CS\" stands for Common Stock.",
+ "example": "CS",
+ "type": "string"
+ },
+ "shares_outstanding": {
+ "description": "The total number of shares that the company has issued and are held by investors.",
+ "example": 1000000,
+ "type": "number"
+ },
+ "ticker": {
+ "description": "The ticker symbol of the IPO event.",
+ "example": "AAPL",
+ "type": "string"
+ },
+ "total_offer_size": {
+                              "description": "The total amount raised by the company in the IPO.",
+ "example": 1000000,
+ "format": "float",
+ "type": "number"
+ },
+ "us_code": {
+ "description": "This is a unique nine-character alphanumeric code that identifies a North American financial security for the purposes of facilitating clearing and settlement of trades.",
+                              "example": "037833100",
+ "type": "string"
+ }
+ },
+ "required": [
+ "name",
+ "last_updated",
+ "primary_exchange",
+ "security_type",
+ "security_description",
+ "ipo_status"
+ ],
+ "type": "object",
+ "x-polygon-go-type": {
+ "name": "IPOsResult"
+ }
+ },
+ "type": "array"
+ },
+ "status": {
+ "description": "The status of this request's response.",
+ "type": "string"
+ }
+ },
+ "type": "object"
+ }
+ }
+ },
+ "description": "A list of IPO events."
+ }
+ },
+ "summary": "IPOs",
+ "tags": [
+ "reference:stocks:ipos"
+ ],
+ "x-polygon-entitlement-data-type": {
+ "description": "Reference data",
+ "name": "reference"
+ },
+ "x-polygon-paginate": {
+ "limit": {
+ "default": 10,
+ "max": 1000
+ },
+ "order": {
+ "default": "desc"
+ },
+ "sort": {
+ "default": "listing_date",
+ "enum": [
+ "listing_date",
+ "ticker",
+ "last_updated",
+ "security_type",
+ "issuer_name",
+ "currency_code",
+ "isin",
+ "us_code",
+ "final_issue_price",
+ "min_shares_offered",
+ "max_shares_offered",
+ "lowest_offer_price",
+ "highest_offer_price",
+ "total_offer_size",
+ "shares_outstanding",
+ "primary_exchange",
+ "lot_size",
+ "security_description",
+ "ipo_status"
+ ]
+ }
+ }
+ }
+ },
"/vX/reference/tickers/taxonomies": {
"get": {
"description": "Many investors place a high value on sector data. It is used to measure economic activity, identify peers and competitors, build ETF products, quantify market share, and compare company performance. However, there are some limitations to industry standard sectors:\n* They have difficulty identifying the primary area of activity for large, complex businesses.\n* Studies confirm significant disagreement between classification schemes when attempting to categorize the same companies.\n* The systems' hierarchical nature is inflexible and struggles to convey business nuances.\n \n \nAs a result, we've developed a new taxonomy to supplement existing sector classifications. The taxonomy is created by reviewing related 10K filings to create a set of structured categories and tags.\n \n \nThe categories are based on company operating models and are industry agnostic. Our current version only supports one category, Revenue Streams, with future plans to support more.\n \n \nThe tags define a specific type within the category. Within the Revenue Streams category, for example, tags for \"product sales\" and \"advertising\" may be found. A company may have many tags in a given category. The complete Revenue Streams taxonomy is shown below.\n \n \nOur taxonomy is powered by AI and is currently in early beta testing. You should expect some inaccuracies in the responses.\n \n \n## **Revenue Streams**\n *Latest Revision (7/7/2023)*\n \n \n- **Physical Product Sales:**\n Revenue generated from the sale of tangible goods or physical products to customers, either in-store or online.\n - Consumer Goods\n - Industrial Goods\n - Electronics\n - Vehicles\n - Healthcare Products\n \n \n- **Digital Product Sales:**\n Revenue earned from the sale of digital goods or products, such as software licenses, e-books, music downloads, or digital media content. It also includes revenue obtained by selling aggregated, anonymized, or processed data to third parties for market research, analytics, or other purposes.\n - Software\n - E-books and Digital Media\n - Mobile Applications\n - Games\n - Online Courses\n - Market Research Data\n - Customer Behavior Data\n \n \n- **Professional Services:**\n Revenue obtained by providing specialized services, expertise, or consulting to clients in exchange for fees. This includes services offered by professionals such as lawyers, accountants, or consultants.\n - Consulting\n - Legal Services\n - Financial Services\n - Marketing Services\n - Construction Services\n - Education & Tutoring\n \n \n- **Consumer Services:**\n Revenue earned from providing services directly to consumers, including services like healthcare, personal grooming, fitness, or hospitality.\n - Dining & Hospitality\n - Personal Care\n - Entertainment & Recreation\n - Fitness & Wellness\n - Travel & Tourism\n - Transportation\n - Home Services\n - Child & Family Care\n - Automotive\n \n \n- **Subscription-based Revenue:**\n Revenue obtained from recurring fees charged to customers for accessing a product or service over a defined period. 
This includes revenue from subscription-based models, membership programs, or software-as-a-service (SaaS) offerings.\n - Software as a Service (SaaS)\n - Streaming Services\n - Physical Media\n - Memberships\n \n \n- **Licensing and Royalties:**\n Revenue generated from the licensing of intellectual property rights to third parties, including franchise rights, patent licensing, brand licensing, and the receipt of royalties for authorized use of intellectual property like music royalties, book royalties, or patent royalties.\n - Franchise Fees\n - Patent Licensing\n - Brand Licensing\n - Media Royalties\n \n \n- **Advertising:**\n Revenue generated by displaying ads or promotional content to customers, whether through traditional or digital advertising channels, including revenue from display ads, sponsored content, or affiliate marketing.\n - Print Advertising\n - Online Display Advertising\n - Social Media Advertising\n - Influencer Marketing\n \n \n- **Commission-Based Revenue:**\n Revenue earned by acting as an intermediary and receiving a percentage or commission on sales made on behalf of another party. This includes revenue from affiliate programs, referral fees, or any other commission-based revenue models.\n - Real Estate Commissions\n - Affiliate Marketing Commissions\n - Online Marketplace Commissions\n \n \n- **Rentals or Leasing:**\n Revenue earned by leasing or renting out assets, properties, or equipment to customers, including rental income from real estate properties, equipment leasing, or vehicle rentals.\n - Property Rentals\n - Equipment Leasing\n - Vehicle Rentals",
@@ -31421,6 +31814,16 @@
"paths": [
"/v1/related-companies/{ticker}"
]
+ },
+ {
+ "paths": [
+ "/vX/reference/ipos"
+ ]
+ },
+ {
+ "paths": [
+ "/vX/reference/short-interest/{identifier_type}/{identifier}"
+ ]
}
]
}
diff --git a/.polygon/websocket.json b/.polygon/websocket.json
index 8d7d539b..ff09762a 100644
--- a/.polygon/websocket.json
+++ b/.polygon/websocket.json
@@ -269,7 +269,7 @@
},
"c": {
"type": "array",
- "description": "The trade conditions. See Conditions and Indicators\" for Polygon.io's trade conditions glossary.\n",
+ "description": "The trade conditions. See Conditions and Indicators for Polygon.io's trade conditions glossary.\n",
"items": {
"type": "integer",
"description": "The ID of the condition."
@@ -277,7 +277,7 @@
},
"t": {
"type": "integer",
- "description": "The Timestamp in Unix MS."
+ "description": "The SIP timestamp in Unix MS."
},
"q": {
"type": "integer",
@@ -407,7 +407,7 @@
},
"t": {
"type": "integer",
- "description": "The Timestamp in Unix MS."
+ "description": "The SIP timestamp in Unix MS."
},
"q": {
"type": "integer",
@@ -3964,7 +3964,7 @@
},
"c": {
"type": "array",
- "description": "The trade conditions. See Conditions and Indicators\" for Polygon.io's trade conditions glossary.\n",
+ "description": "The trade conditions. See Conditions and Indicators for Polygon.io's trade conditions glossary.\n",
"items": {
"type": "integer",
"description": "The ID of the condition."
@@ -3972,7 +3972,7 @@
},
"t": {
"type": "integer",
- "description": "The Timestamp in Unix MS."
+ "description": "The SIP timestamp in Unix MS."
},
"q": {
"type": "integer",
@@ -4041,7 +4041,7 @@
},
"t": {
"type": "integer",
- "description": "The Timestamp in Unix MS."
+ "description": "The SIP timestamp in Unix MS."
},
"q": {
"type": "integer",
diff --git a/examples/rest/stocks-ipos.py b/examples/rest/stocks-ipos.py
new file mode 100644
index 00000000..54852335
--- /dev/null
+++ b/examples/rest/stocks-ipos.py
@@ -0,0 +1,13 @@
+from polygon import RESTClient
+
+# docs
+# https://polygon.io/docs/stocks/get_vx_reference_ipos
+
+# client = RESTClient("XXXXXX") # hardcoded api_key is used
+client = RESTClient() # POLYGON_API_KEY environment variable is used
+
+ipos = []
+for ipo in client.vx.list_ipos(ticker="RDDT"):
+ ipos.append(ipo)
+
+print(ipos)
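+
+# Each result mirrors the fields of the IPOs endpoint response (ticker,
+# listing_date, ipo_status, final_issue_price, ...). Assuming the model
+# exposes them as attributes, like the other models in this client, you can do:
+# for ipo in ipos:
+#     print(ipo.ticker, ipo.listing_date, ipo.ipo_status)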
diff --git a/examples/rest/stocks-stock_financials.py b/examples/rest/stocks-stock_financials.py
index dc356494..a75087e7 100644
--- a/examples/rest/stocks-stock_financials.py
+++ b/examples/rest/stocks-stock_financials.py
@@ -8,6 +8,13 @@
client = RESTClient() # POLYGON_API_KEY environment variable is used
financials = []
-for f in client.vx.list_stock_financials("AAPL"):
+for f in client.vx.list_stock_financials("AAPL", filing_date="2024-11-01"):
financials.append(f)
+
+ # get diluted_earnings_per_share
+ # print(f.financials.income_statement.diluted_earnings_per_share)
+
+ # get net_income_loss
+ # print(f.financials.income_statement.net_income_loss)
+
print(financials)
diff --git a/examples/tools/hunting-anomalies/README.md b/examples/tools/hunting-anomalies/README.md
new file mode 100644
index 00000000..4b36f1b5
--- /dev/null
+++ b/examples/tools/hunting-anomalies/README.md
@@ -0,0 +1,49 @@
+# Hunting Anomalies in the Stock Market
+
+This repository contains all the necessary scripts and data directories used in the [Hunting Anomalies in the Stock Market](https://polygon.io/blog/hunting-anomalies-in-stock-market/) tutorial, hosted on Polygon.io's blog. The tutorial demonstrates how to detect statistical anomalies in historical US stock market data through a comprehensive workflow that involves downloading data, building a lookup table, querying for anomalies, and visualizing them through a web interface.
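+
+At its core, the lookup table stores each ticker's daily trade count together with a trailing 5-day average and standard deviation, and a day is flagged as anomalous when its trade count sits far above that average. Below is a minimal sketch of the criterion applied by `query-lookup-table.py` and `gui-lookup-table.py` (the 3-standard-deviation default matches `threshold_multiplier` in those scripts):
+
+```python
+from typing import Optional
+
+
+def is_anomaly(
+    trades: float,
+    avg_trades: Optional[float],
+    std_trades: Optional[float],
+    threshold: float = 3.0,
+) -> bool:
+    """Flag a day whose trade count is more than `threshold` standard
+    deviations above its trailing 5-day average."""
+    if avg_trades is None or std_trades is None or std_trades <= 0:
+        return False
+    z_score = (trades - avg_trades) / std_trades
+    return z_score > threshold
+```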
+
+### Prerequisites
+
+- Python 3.8+
+- Access to Polygon.io's historical data via Flat Files
+- An active Polygon.io API key, obtainable by signing up for a Stocks paid plan
+
+### Repository Contents
+
+- `README.md`: This file, outlining setup and execution instructions.
+- `aggregates_day`: Directory where downloaded CSV data files are stored.
+- `build-lookup-table.py`: Python script to build a lookup table from the historical data.
+- `query-lookup-table.py`: Python script to query the lookup table for anomalies.
+- `gui-lookup-table.py`: Python script for a browser-based interface to explore anomalies visually.
+
+### Running the Tutorial
+
+1. **Ensure Python 3.8+ is installed:** Check your Python version and ensure the required third-party libraries (polygon-api-client and pandas) are installed; pickle and argparse ship with the Python standard library.
+
+2. **Set up your API key:** Make sure you have an active paid Polygon.io Stocks subscription for accessing Flat Files. Set up your API key in your environment or directly in the scripts where required. For example, to use the environment variable that the example scripts read:
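+ ```bash
+ export POLYGON_API_KEY=YOUR_API_KEY
+ ```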
+
+3. **Download Historical Data:** Use the MinIO client to download historical stock market data. Adjust the commands and paths based on the data you are interested in.
+ ```bash
+ mc alias set s3polygon https://files.polygon.io YOUR_ACCESS_KEY YOUR_SECRET_KEY
+ mc cp --recursive s3polygon/flatfiles/us_stocks_sip/day_aggs_v1/2024/08/ ./aggregates_day/
+ mc cp --recursive s3polygon/flatfiles/us_stocks_sip/day_aggs_v1/2024/09/ ./aggregates_day/
+ mc cp --recursive s3polygon/flatfiles/us_stocks_sip/day_aggs_v1/2024/10/ ./aggregates_day/
+ gunzip ./aggregates_day/*.gz
+ ```
+
+4. **Build the Lookup Table:** This script processes the downloaded data and builds a lookup table, saving it as `lookup_table.pkl`.
+ ```bash
+ python build-lookup-table.py
+ ```
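+
+ Once built, the table is a plain dictionary keyed by ticker and then by date (see `build-lookup-table.py`). You can sanity-check it from a Python shell, substituting any ticker and date present in your data:
+ ```python
+ import pickle
+
+ with open("lookup_table.pkl", "rb") as f:
+     lookup_table = pickle.load(f)
+
+ # One day's stats: trades, close_price, price_diff, plus avg_trades and
+ # std_trades computed over the previous 5 trading days.
+ print(lookup_table["AAPL"]["2024-10-18"])
+ ```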
+
+5. **Query Anomalies:** Replace `2024-10-18` with the date you want to analyze for anomalies.
+ ```bash
+ python query-lookup-table.py 2024-10-18
+ ```
+
+6. **Run the GUI:** Access the web interface at `http://localhost:8888` to explore the anomalies visually.
+ ```bash
+ python gui-lookup-table.py
+ ```
+
+For a complete step-by-step guide on each phase of the anomaly detection process, including additional configurations and troubleshooting, refer to the detailed [tutorial on our blog](https://polygon.io/blog/hunting-anomalies-in-stock-market/).
diff --git a/examples/tools/hunting-anomalies/aggregates_day/README.md b/examples/tools/hunting-anomalies/aggregates_day/README.md
new file mode 100644
index 00000000..a0ade480
--- /dev/null
+++ b/examples/tools/hunting-anomalies/aggregates_day/README.md
@@ -0,0 +1 @@
+Download flat files into here.
diff --git a/examples/tools/hunting-anomalies/build-lookup-table.py b/examples/tools/hunting-anomalies/build-lookup-table.py
new file mode 100644
index 00000000..16abca2d
--- /dev/null
+++ b/examples/tools/hunting-anomalies/build-lookup-table.py
@@ -0,0 +1,91 @@
+import os
+import pandas as pd # type: ignore
+from collections import defaultdict
+import pickle
+import json
+from typing import DefaultDict, Dict, Any, BinaryIO
+
+# Directory containing the daily CSV files
+data_dir = "./aggregates_day/"
+
+# Initialize a dictionary to hold trades data
+trades_data = defaultdict(list)
+
+# List all CSV files in the directory
+files = sorted([f for f in os.listdir(data_dir) if f.endswith(".csv")])
+
+print("Starting to process files...")
+
+# Process each file (assuming files are named in order)
+for file in files:
+ print(f"Processing {file}")
+ file_path = os.path.join(data_dir, file)
+ df = pd.read_csv(file_path)
+ # For each stock, store the date and relevant data
+ for _, row in df.iterrows():
+ ticker = row["ticker"]
+ date = pd.to_datetime(row["window_start"], unit="ns").date()
+ trades = row["transactions"]
+ close_price = row["close"] # Ensure 'close' column exists in your CSV
+ trades_data[ticker].append(
+ {"date": date, "trades": trades, "close_price": close_price}
+ )
+
+print("Finished processing files.")
+print("Building lookup table...")
+
+# Now, build the lookup table with rolling averages and percentage price change
+lookup_table: DefaultDict[str, Dict[str, Any]] = defaultdict(
+ dict
+) # Nested dict: ticker -> date -> stats
+
+for ticker, records in trades_data.items():
+ # Convert records to DataFrame
+ df_ticker = pd.DataFrame(records)
+ # Sort records by date
+ df_ticker.sort_values("date", inplace=True)
+ df_ticker.set_index("date", inplace=True)
+
+ # Calculate the percentage change in close_price
+ df_ticker["price_diff"] = (
+ df_ticker["close_price"].pct_change() * 100
+ ) # Multiply by 100 for percentage
+
+ # Shift trades to exclude the current day from rolling calculations
+ df_ticker["trades_shifted"] = df_ticker["trades"].shift(1)
+ # Calculate rolling average and standard deviation over the previous 5 days
+ df_ticker["avg_trades"] = df_ticker["trades_shifted"].rolling(window=5).mean()
+ df_ticker["std_trades"] = df_ticker["trades_shifted"].rolling(window=5).std()
+ # Store the data in the lookup table
+ for date, row in df_ticker.iterrows():
+ # Convert date to string for JSON serialization
+ date_str = date.strftime("%Y-%m-%d")
+ # Ensure rolling stats are available
+ if pd.notnull(row["avg_trades"]) and pd.notnull(row["std_trades"]):
+ lookup_table[ticker][date_str] = {
+ "trades": row["trades"],
+ "close_price": row["close_price"],
+ "price_diff": row["price_diff"],
+ "avg_trades": row["avg_trades"],
+ "std_trades": row["std_trades"],
+ }
+ else:
+ # Store data without rolling stats if not enough data points
+ lookup_table[ticker][date_str] = {
+ "trades": row["trades"],
+ "close_price": row["close_price"],
+ "price_diff": row["price_diff"],
+ "avg_trades": None,
+ "std_trades": None,
+ }
+
+print("Lookup table built successfully.")
+
+# Convert defaultdict to a regular dict before saving
+lookup_table_dict = {k: v for k, v in lookup_table.items()}
+
+# Save the lookup table to a file for later use
+with open("lookup_table.pkl", "wb") as f: # type: BinaryIO
+ pickle.dump(lookup_table_dict, f)
+
+print("Lookup table saved to 'lookup_table.pkl'.")
diff --git a/examples/tools/hunting-anomalies/gui-lookup-table.py b/examples/tools/hunting-anomalies/gui-lookup-table.py
new file mode 100644
index 00000000..df58746c
--- /dev/null
+++ b/examples/tools/hunting-anomalies/gui-lookup-table.py
@@ -0,0 +1,302 @@
+import os
+import pickle
+import json
+from datetime import datetime
+from polygon import RESTClient
+from polygon.rest.models import Agg
+import http.server
+import socketserver
+import traceback
+from urllib.parse import urlparse, parse_qs
+
+PORT = 8888
+
+# Load the lookup_table
+with open("lookup_table.pkl", "rb") as f:
+ lookup_table = pickle.load(f)
+
+
+class handler(http.server.SimpleHTTPRequestHandler):
+ def do_GET(self):
+ # Parse the path and query parameters
+ parsed_path = urlparse(self.path)
+ path = parsed_path.path
+ query_params = parse_qs(parsed_path.query)
+
+ if path == "/":
+ # Handle the root path
+ # Get the date parameter if provided
+ date_param = query_params.get("date", [None])[0]
+
+ # Get all dates from the lookup table
+ all_dates = set()
+ for ticker_data in lookup_table.values():
+ all_dates.update(ticker_data.keys())
+ all_dates = sorted(all_dates)
+
+ # If date is None, get the latest date from the lookup table
+ if date_param is None:
+ if all_dates:
+ latest_date = max(all_dates)
+ else:
+ self.send_response(200)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ html_content = (
+ "<html><body><h1>No data available.</h1></body></html>"
+ )
+ self.wfile.write(html_content.encode())
+ return
+ else:
+ latest_date = date_param
+
+ # Ensure latest_date is in all_dates
+ if latest_date not in all_dates:
+ # Handle the case where the provided date is invalid
+ self.send_response(400)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ error_html = f"Error: No data available for date {latest_date} "
+ self.wfile.write(error_html.encode())
+ return
+
+ # Now, get the anomalies for the latest_date
+ anomalies = []
+ for ticker, date_data in lookup_table.items():
+ if latest_date in date_data:
+ data = date_data[latest_date]
+ trades = data["trades"]
+ avg_trades = data["avg_trades"]
+ std_trades = data["std_trades"]
+ if (
+ avg_trades is not None
+ and std_trades is not None
+ and std_trades > 0
+ ):
+ z_score = (trades - avg_trades) / std_trades
+ threshold_multiplier = 3 # Adjust as needed
+ if z_score > threshold_multiplier:
+ anomalies.append(
+ {
+ "ticker": ticker,
+ "date": latest_date,
+ "trades": trades,
+ "avg_trades": avg_trades,
+ "std_trades": std_trades,
+ "z_score": z_score,
+ "close_price": data["close_price"],
+ "price_diff": data["price_diff"],
+ }
+ )
+ # Sort anomalies by trades in descending order
+ anomalies.sort(key=lambda x: x["trades"], reverse=True)
+ # Generate the HTML to display the anomalies
+ self.send_response(200)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ # Build the HTML content
+ html_content = '<html><head><title>Anomalies for {}</title></head><body>'.format(
+ latest_date
+ )
+ html_content += '<h1>Anomalies for {}</h1>'.format(
+ latest_date
+ )
+ # Add navigation links (prev and next dates)
+ current_index = all_dates.index(latest_date)
+ prev_date = all_dates[current_index - 1] if current_index > 0 else None
+ next_date = (
+ all_dates[current_index + 1]
+ if current_index < len(all_dates) - 1
+ else None
+ )
+ html_content += "
"
+ if prev_date:
+ html_content += '<a href="/?date={}">Previous Date</a> '.format(
+ prev_date
+ )
+ if next_date:
+ html_content += '<a href="/?date={}">Next Date</a>'.format(next_date)
+ html_content += "
"
+ # Display the anomalies in a table
+ html_content += (
+ '<table border="1" cellpadding="5" style="border-collapse: collapse;">'
+ )
+ html_content += ""
+ html_content += "Ticker "
+ html_content += "Trades "
+ html_content += "Avg Trades "
+ html_content += "Std Dev "
+ html_content += "Z-score "
+ html_content += "Close Price "
+ html_content += "Price Diff "
+ html_content += "Chart "
+ html_content += " "
+ for anomaly in anomalies:
+ html_content += ""
+ html_content += "{} ".format(anomaly["ticker"])
+ html_content += "{} ".format(anomaly["trades"])
+ html_content += "{:.2f} ".format(anomaly["avg_trades"])
+ html_content += "{:.2f} ".format(anomaly["std_trades"])
+ html_content += "{:.2f} ".format(anomaly["z_score"])
+ html_content += "{:.2f} ".format(anomaly["close_price"])
+ html_content += "{:.2f} ".format(anomaly["price_diff"])
+ # Add a link to the chart
+ html_content += (
+ 'View Chart '.format(
+ anomaly["ticker"], latest_date
+ )
+ )
+ html_content += " "
+ html_content += '
'
+ html_content += "
"
+ self.wfile.write(html_content.encode())
+ elif path == "/chart":
+ # Handle the chart page
+ # Get 'ticker' and 'date' from query parameters
+ ticker = query_params.get("ticker", [None])[0]
+ date = query_params.get("date", [None])[0]
+ if ticker is None or date is None:
+ # Return an error page
+ self.send_response(400)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ error_html = "Error: Missing ticker or date parameter "
+ self.wfile.write(error_html.encode())
+ else:
+ # Fetch minute aggregates for the ticker and date
+ client = RESTClient(
+ trace=True
+ ) # POLYGON_API_KEY environment variable is used
+ try:
+ aggs = []
+ date_from = date
+ date_to = date
+ for a in client.list_aggs(
+ ticker,
+ 1,
+ "minute",
+ date_from,
+ date_to,
+ limit=50000,
+ ):
+ aggs.append(a)
+ # Prepare data for the chart
+ data = []
+ for agg in aggs:
+ if isinstance(agg, Agg) and isinstance(agg.timestamp, int):
+ new_record = [
+ agg.timestamp,
+ agg.open,
+ agg.high,
+ agg.low,
+ agg.close,
+ ]
+ data.append(new_record)
+ # Generate the HTML for the chart page
+ chart_html = """
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ """ % (
+ json.dumps(data),
+ ticker,
+ date,
+ ticker,
+ )
+ self.send_response(200)
+ self.send_header("Content-type", "text/html")
+ self.send_header("Access-Control-Allow-Origin", "*")
+ self.end_headers()
+ self.wfile.write(chart_html.encode())
+ except Exception as e:
+ # Handle exceptions
+ self.send_response(500)
+ self.send_header("Content-type", "text/html")
+ self.end_headers()
+ error_html = "Error fetching data: {} ".format(
+ str(e)
+ )
+ self.wfile.write(error_html.encode())
+ else:
+ # Serve files from the current directory
+ super().do_GET()
+
+
+def run_server():
+ with socketserver.TCPServer(("", PORT), handler) as httpd:
+ print("serving at port", PORT)
+ try:
+ httpd.serve_forever()
+ except KeyboardInterrupt:
+ print("\nExiting gracefully...")
+ httpd.shutdown()
+ httpd.server_close()
+
+
+if __name__ == "__main__":
+ run_server()
diff --git a/examples/tools/hunting-anomalies/query-lookup-table.py b/examples/tools/hunting-anomalies/query-lookup-table.py
new file mode 100644
index 00000000..38bb86cf
--- /dev/null
+++ b/examples/tools/hunting-anomalies/query-lookup-table.py
@@ -0,0 +1,63 @@
+import pickle
+import argparse
+
+# Parse command-line arguments
+parser = argparse.ArgumentParser(description="Anomaly Detection Script")
+parser.add_argument("date", type=str, help="Target date in YYYY-MM-DD format")
+args = parser.parse_args()
+
+# Load the lookup_table
+with open("lookup_table.pkl", "rb") as f:
+ lookup_table = pickle.load(f)
+
+# Threshold for considering an anomaly (e.g., 3 standard deviations)
+threshold_multiplier = 3
+
+# Date for which we want to find anomalies
+target_date_str = args.date
+
+# List to store anomalies
+anomalies = []
+
+# Iterate over all tickers in the lookup table
+for ticker, date_data in lookup_table.items():
+ if target_date_str in date_data:
+ data = date_data[target_date_str]
+ trades = data["trades"]
+ avg_trades = data["avg_trades"]
+ std_trades = data["std_trades"]
+ if avg_trades is not None and std_trades is not None and std_trades > 0:
+ z_score = (trades - avg_trades) / std_trades
+ if z_score > threshold_multiplier:
+ anomalies.append(
+ {
+ "ticker": ticker,
+ "date": target_date_str,
+ "trades": trades,
+ "avg_trades": avg_trades,
+ "std_trades": std_trades,
+ "z_score": z_score,
+ "close_price": data["close_price"],
+ "price_diff": data["price_diff"],
+ }
+ )
+
+# Sort anomalies by trades in descending order
+anomalies.sort(key=lambda x: x["trades"], reverse=True)
+
+# Print the anomalies with aligned columns
+print(f"\nAnomalies Found for {target_date_str}:\n")
+print(
+ f"{'Ticker':<10}{'Trades':>10}{'Avg Trades':>15}{'Std Dev':>10}{'Z-score':>10}{'Close Price':>12}{'Price Diff':>12}"
+)
+print("-" * 91)
+for anomaly in anomalies:
+ print(
+ f"{anomaly['ticker']:<10}"
+ f"{anomaly['trades']:>10.0f}"
+ f"{anomaly['avg_trades']:>15.2f}"
+ f"{anomaly['std_trades']:>10.2f}"
+ f"{anomaly['z_score']:>10.2f}"
+ f"{anomaly['close_price']:>12.2f}"
+ f"{anomaly['price_diff']:>12.2f}"
+ )
diff --git a/examples/tools/related-companies/data.json b/examples/tools/related-companies/data.json
new file mode 100644
index 00000000..2b63ea05
--- /dev/null
+++ b/examples/tools/related-companies/data.json
@@ -0,0 +1 @@
+{"nodes": [{"id": 1, "label": "MSFT"}, {"id": 2, "label": "GOOGL"}, {"id": 3, "label": "NVDA"}, {"id": 4, "label": "AMZN"}, {"id": 5, "label": "GOOG"}, {"id": 6, "label": "META"}, {"id": 7, "label": "TSLA"}, {"id": 8, "label": "AAPL"}, {"id": 9, "label": "CRM"}, {"id": 10, "label": "ORCL"}, {"id": 11, "label": "AMD"}, {"id": 12, "label": "NFLX"}, {"id": 13, "label": "WMT"}, {"id": 14, "label": "DIS"}, {"id": 15, "label": "SNAP"}, {"id": 16, "label": "SHOP"}, {"id": 17, "label": "INTC"}, {"id": 18, "label": "ANET"}, {"id": 19, "label": "RIVN"}, {"id": 20, "label": "GM"}, {"id": 21, "label": "F"}, {"id": 22, "label": "LCID"}, {"id": 23, "label": "WBD"}, {"id": 24, "label": "CMCSA"}, {"id": 25, "label": "PARA"}, {"id": 26, "label": "T"}, {"id": 27, "label": "ROKU"}], "edges": [{"from": 1, "to": 2}, {"from": 1, "to": 3}, {"from": 1, "to": 4}, {"from": 1, "to": 5}, {"from": 1, "to": 6}, {"from": 1, "to": 7}, {"from": 1, "to": 8}, {"from": 1, "to": 9}, {"from": 1, "to": 10}, {"from": 1, "to": 11}, {"from": 4, "to": 1}, {"from": 4, "to": 2}, {"from": 4, "to": 5}, {"from": 4, "to": 8}, {"from": 4, "to": 7}, {"from": 4, "to": 3}, {"from": 4, "to": 6}, {"from": 4, "to": 12}, {"from": 4, "to": 13}, {"from": 4, "to": 14}, {"from": 6, "to": 5}, {"from": 6, "to": 2}, {"from": 6, "to": 1}, {"from": 6, "to": 4}, {"from": 6, "to": 8}, {"from": 6, "to": 7}, {"from": 6, "to": 3}, {"from": 6, "to": 15}, {"from": 6, "to": 12}, {"from": 6, "to": 11}, {"from": 8, "to": 1}, {"from": 8, "to": 2}, {"from": 8, "to": 4}, {"from": 8, "to": 5}, {"from": 8, "to": 7}, {"from": 8, "to": 3}, {"from": 8, "to": 6}, {"from": 8, "to": 12}, {"from": 8, "to": 14}, {"from": 8, "to": 11}, {"from": 5, "to": 2}, {"from": 5, "to": 1}, {"from": 5, "to": 6}, {"from": 5, "to": 4}, {"from": 5, "to": 8}, {"from": 5, "to": 7}, {"from": 5, "to": 3}, {"from": 5, "to": 15}, {"from": 5, "to": 12}, {"from": 5, "to": 16}, {"from": 3, "to": 11}, {"from": 3, "to": 6}, {"from": 3, "to": 2}, {"from": 3, "to": 7}, {"from": 3, "to": 5}, {"from": 3, "to": 1}, {"from": 3, "to": 8}, {"from": 3, "to": 4}, {"from": 3, "to": 17}, {"from": 3, "to": 18}, {"from": 7, "to": 19}, {"from": 7, "to": 2}, {"from": 7, "to": 4}, {"from": 7, "to": 20}, {"from": 7, "to": 21}, {"from": 7, "to": 22}, {"from": 7, "to": 5}, {"from": 7, "to": 6}, {"from": 7, "to": 8}, {"from": 7, "to": 3}, {"from": 14, "to": 12}, {"from": 14, "to": 23}, {"from": 14, "to": 4}, {"from": 14, "to": 24}, {"from": 14, "to": 25}, {"from": 14, "to": 8}, {"from": 14, "to": 2}, {"from": 14, "to": 26}, {"from": 14, "to": 5}, {"from": 14, "to": 27}]}
\ No newline at end of file
diff --git a/examples/tools/related-companies/index.html b/examples/tools/related-companies/index.html
new file mode 100644
index 00000000..1b5ae182
--- /dev/null
+++ b/examples/tools/related-companies/index.html
@@ -0,0 +1,30 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <title>Vis Network | Related Companies</title>
+  <script type="text/javascript" src="https://unpkg.com/vis-network/standalone/umd/vis-network.min.js"></script>
+  <style type="text/css">
+    #mynetwork { width: 100%; height: 800px; border: 1px solid lightgray; }
+  </style>
+</head>
+<body>
+  <div id="mynetwork"></div>
+  <script type="text/javascript">
+    // Paste the "nodes" and "edges" arrays from data.json below.
+    var nodes = new vis.DataSet([]);
+    var edges = new vis.DataSet([]);
+    var container = document.getElementById("mynetwork");
+    var data = { nodes: nodes, edges: edges };
+    var options = {};
+    var network = new vis.Network(container, data, options);
+  </script>
+</body>
+</html>
diff --git a/examples/tools/related-companies/readme.md b/examples/tools/related-companies/readme.md
new file mode 100644
index 00000000..9f107550
--- /dev/null
+++ b/examples/tools/related-companies/readme.md
@@ -0,0 +1,36 @@
+# See Connections with the Related Companies API
+
+This repository contains the Python script and HTML file used in our tutorial to demonstrate how to identify and visualize relationships between companies using Polygon.io's Related Companies API. The tutorial showcases how to fetch related company data and create a dynamic network graph using Python and vis.js, providing insights into the interconnected corporate landscape.
+
+
+
+Please see the [tutorial](https://polygon.io/blog/related-companies-api) for more details.
+
+### Prerequisites
+
+- Python 3.8+
+- Polygon.io's [python client](https://github.com/polygon-io/client-python) installed
+- An active Polygon.io account with an API key
+
+### Repository Contents
+
+- `related-companies-demo.py`: Python script to fetch and process data from the Related Companies API.
+- `index.html`: HTML file for visualizing the data as a network graph using vis.js.
+
+### Running the Example
+
+To run the Python script, ensure you have Python installed and your API key ready. Execute the following command:
+
+```
+python related-companies-demo.py
+```
+
+The script will generate a `data.json` file, which contains the nodes and edges for the network graph.
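+
+For reference, `data.json` has this general shape (a truncated sketch; the full file generated from these tickers is much larger):
+
+```
+{"nodes": [{"id": 1, "label": "MSFT"}, {"id": 2, "label": "GOOGL"}],
+ "edges": [{"from": 1, "to": 2}]}
+```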
+
+To visualize the relationships:
+
+1. Take the `nodes` and `edges` from the `data.json` file and replace them in the `index.html` file
+2. Open `index.html` in your web browser.
+3. The web page should display the network graph.
+
+For a complete step-by-step guide on setting up and exploring the capabilities of the Related Companies API, refer to our detailed [tutorial](https://polygon.io/blog/related-companies-api).
\ No newline at end of file
diff --git a/examples/tools/related-companies/related-companies-demo.py b/examples/tools/related-companies/related-companies-demo.py
new file mode 100644
index 00000000..221d5f0f
--- /dev/null
+++ b/examples/tools/related-companies/related-companies-demo.py
@@ -0,0 +1,40 @@
+from polygon import RESTClient
+import json
+
+
+def get_related_tickers():
+ client = RESTClient(trace=True)
+
+ # Fetch a limited list of tickers to keep the example manageable
+ main_tickers = ["MSFT", "AMZN", "META", "AAPL", "GOOG", "NVDA", "TSLA", "DIS"]
+
+ # Prepare data structures for nodes and edges
+ nodes = []
+ edges = []
+ id_map = {}
+ current_id = 1
+
+ # Iterate over each main ticker and find related tickers
+ for ticker in main_tickers:
+ if ticker not in id_map:
+ id_map[ticker] = current_id
+ nodes.append({"id": current_id, "label": ticker})
+ current_id += 1
+
+ related_companies = client.get_related_companies(ticker)
+ for company in related_companies:
+ related_ticker = company.ticker
+ if related_ticker not in id_map:
+ id_map[related_ticker] = current_id
+ nodes.append({"id": current_id, "label": related_ticker})
+ current_id += 1
+
+ edges.append({"from": id_map[ticker], "to": id_map[related_ticker]})
+
+ # Save the nodes and edges to a JSON file for web visualization
+ with open("data.json", "w") as f:
+ json.dump({"nodes": nodes, "edges": edges}, f)
+
+
+if __name__ == "__main__":
+ get_related_tickers()
diff --git a/examples/tools/related-companies/related-companies.png b/examples/tools/related-companies/related-companies.png
new file mode 100644
index 00000000..ac9b14c7
Binary files /dev/null and b/examples/tools/related-companies/related-companies.png differ
diff --git a/poetry.lock b/poetry.lock
index 5c87ae90..59cdd317 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
[[package]]
name = "alabaster"
@@ -44,33 +44,33 @@ pytz = ">=2015.7"
[[package]]
name = "black"
-version = "24.4.2"
+version = "24.8.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
- {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"},
- {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"},
- {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"},
- {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"},
- {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"},
- {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"},
- {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"},
- {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"},
- {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"},
- {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"},
- {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"},
- {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"},
- {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"},
- {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"},
- {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"},
- {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"},
- {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"},
- {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"},
- {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"},
- {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"},
- {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"},
- {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"},
+ {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"},
+ {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"},
+ {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"},
+ {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"},
+ {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"},
+ {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"},
+ {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"},
+ {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"},
+ {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"},
+ {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"},
+ {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"},
+ {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"},
+ {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"},
+ {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"},
+ {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"},
+ {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"},
+ {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"},
+ {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"},
+ {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"},
+ {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"},
+ {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"},
+ {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"},
]
[package.dependencies]
@@ -90,13 +90,13 @@ uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "certifi"
-version = "2024.7.4"
+version = "2024.12.14"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
- {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
+ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"},
+ {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"},
]
[[package]]
@@ -225,13 +225,13 @@ testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-chec
[[package]]
name = "jinja2"
-version = "3.1.4"
+version = "3.1.5"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
- {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
- {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
+ {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
]
[package.dependencies]
@@ -312,47 +312,53 @@ files = [
[[package]]
name = "mypy"
-version = "1.10.1"
+version = "1.13.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"},
- {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"},
- {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"},
- {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"},
- {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"},
- {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"},
- {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"},
- {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"},
- {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"},
- {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"},
- {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"},
- {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"},
- {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"},
- {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"},
- {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"},
- {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"},
- {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"},
- {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"},
- {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"},
- {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"},
- {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"},
- {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"},
- {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"},
- {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"},
- {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"},
- {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"},
- {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"},
+ {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"},
+ {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"},
+ {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"},
+ {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"},
+ {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"},
+ {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"},
+ {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"},
+ {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"},
+ {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"},
+ {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"},
+ {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"},
+ {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"},
+ {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"},
+ {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"},
+ {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"},
+ {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"},
+ {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"},
+ {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"},
+ {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"},
+ {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"},
+ {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"},
+ {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"},
+ {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"},
+ {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"},
+ {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"},
+ {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"},
+ {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"},
+ {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"},
+ {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"},
+ {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"},
+ {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"},
+ {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"},
]
[package.dependencies]
mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-typing-extensions = ">=4.1.0"
+typing-extensions = ">=4.6.0"
[package.extras]
dmypy = ["psutil (>=4.0)"]
+faster-cache = ["orjson"]
install-types = ["pip"]
mypyc = ["setuptools (>=50)"]
reports = ["lxml"]
@@ -384,62 +390,86 @@ six = ">=1.8.0"
[[package]]
name = "orjson"
-version = "3.10.6"
+version = "3.10.13"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
python-versions = ">=3.8"
files = [
- {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"},
- {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"},
- {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"},
- {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"},
- {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"},
- {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"},
- {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"},
- {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"},
- {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"},
- {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"},
- {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"},
- {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"},
- {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"},
- {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"},
- {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"},
- {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"},
- {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"},
- {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"},
- {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"},
- {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"},
- {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"},
- {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"},
- {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"},
- {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"},
- {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"},
- {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"},
- {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"},
- {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"},
- {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"},
- {file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"},
- {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"},
- {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"},
- {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"},
- {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"},
- {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"},
- {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"},
- {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"},
- {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"},
- {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"},
- {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"},
- {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"},
- {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"},
- {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"},
- {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"},
- {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"},
- {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"},
- {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"},
- {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"},
- {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"},
- {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"},
- {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"},
+ {file = "orjson-3.10.13-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1232c5e873a4d1638ef957c5564b4b0d6f2a6ab9e207a9b3de9de05a09d1d920"},
+ {file = "orjson-3.10.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26a0eca3035619fa366cbaf49af704c7cb1d4a0e6c79eced9f6a3f2437964b6"},
+ {file = "orjson-3.10.13-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d4b6acd7c9c829895e50d385a357d4b8c3fafc19c5989da2bae11783b0fd4977"},
+ {file = "orjson-3.10.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1884e53c6818686891cc6fc5a3a2540f2f35e8c76eac8dc3b40480fb59660b00"},
+ {file = "orjson-3.10.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a428afb5720f12892f64920acd2eeb4d996595bf168a26dd9190115dbf1130d"},
+ {file = "orjson-3.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba5b13b8739ce5b630c65cb1c85aedbd257bcc2b9c256b06ab2605209af75a2e"},
+ {file = "orjson-3.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cab83e67f6aabda1b45882254b2598b48b80ecc112968fc6483fa6dae609e9f0"},
+ {file = "orjson-3.10.13-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:62c3cc00c7e776c71c6b7b9c48c5d2701d4c04e7d1d7cdee3572998ee6dc57cc"},
+ {file = "orjson-3.10.13-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:dc03db4922e75bbc870b03fc49734cefbd50fe975e0878327d200022210b82d8"},
+ {file = "orjson-3.10.13-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:22f1c9a30b43d14a041a6ea190d9eca8a6b80c4beb0e8b67602c82d30d6eec3e"},
+ {file = "orjson-3.10.13-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b42f56821c29e697c68d7d421410d7c1d8f064ae288b525af6a50cf99a4b1200"},
+ {file = "orjson-3.10.13-cp310-cp310-win32.whl", hash = "sha256:0dbf3b97e52e093d7c3e93eb5eb5b31dc7535b33c2ad56872c83f0160f943487"},
+ {file = "orjson-3.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:46c249b4e934453be4ff2e518cd1adcd90467da7391c7a79eaf2fbb79c51e8c7"},
+ {file = "orjson-3.10.13-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a36c0d48d2f084c800763473020a12976996f1109e2fcb66cfea442fdf88047f"},
+ {file = "orjson-3.10.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0065896f85d9497990731dfd4a9991a45b0a524baec42ef0a63c34630ee26fd6"},
+ {file = "orjson-3.10.13-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92b4ec30d6025a9dcdfe0df77063cbce238c08d0404471ed7a79f309364a3d19"},
+ {file = "orjson-3.10.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a94542d12271c30044dadad1125ee060e7a2048b6c7034e432e116077e1d13d2"},
+ {file = "orjson-3.10.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3723e137772639af8adb68230f2aa4bcb27c48b3335b1b1e2d49328fed5e244c"},
+ {file = "orjson-3.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f00c7fb18843bad2ac42dc1ce6dd214a083c53f1e324a0fd1c8137c6436269b"},
+ {file = "orjson-3.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0e2759d3172300b2f892dee85500b22fca5ac49e0c42cfff101aaf9c12ac9617"},
+ {file = "orjson-3.10.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ee948c6c01f6b337589c88f8e0bb11e78d32a15848b8b53d3f3b6fea48842c12"},
+ {file = "orjson-3.10.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:aa6fe68f0981fba0d4bf9cdc666d297a7cdba0f1b380dcd075a9a3dd5649a69e"},
+ {file = "orjson-3.10.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dbcd7aad6bcff258f6896abfbc177d54d9b18149c4c561114f47ebfe74ae6bfd"},
+ {file = "orjson-3.10.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2149e2fcd084c3fd584881c7f9d7f9e5ad1e2e006609d8b80649655e0d52cd02"},
+ {file = "orjson-3.10.13-cp311-cp311-win32.whl", hash = "sha256:89367767ed27b33c25c026696507c76e3d01958406f51d3a2239fe9e91959df2"},
+ {file = "orjson-3.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:dca1d20f1af0daff511f6e26a27354a424f0b5cf00e04280279316df0f604a6f"},
+ {file = "orjson-3.10.13-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a3614b00621c77f3f6487792238f9ed1dd8a42f2ec0e6540ee34c2d4e6db813a"},
+ {file = "orjson-3.10.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c976bad3996aa027cd3aef78aa57873f3c959b6c38719de9724b71bdc7bd14b"},
+ {file = "orjson-3.10.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f74d878d1efb97a930b8a9f9898890067707d683eb5c7e20730030ecb3fb930"},
+ {file = "orjson-3.10.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33ef84f7e9513fb13b3999c2a64b9ca9c8143f3da9722fbf9c9ce51ce0d8076e"},
+ {file = "orjson-3.10.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd2bcde107221bb9c2fa0c4aaba735a537225104173d7e19cf73f70b3126c993"},
+ {file = "orjson-3.10.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:064b9dbb0217fd64a8d016a8929f2fae6f3312d55ab3036b00b1d17399ab2f3e"},
+ {file = "orjson-3.10.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0044b0b8c85a565e7c3ce0a72acc5d35cda60793edf871ed94711e712cb637d"},
+ {file = "orjson-3.10.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7184f608ad563032e398f311910bc536e62b9fbdca2041be889afcbc39500de8"},
+ {file = "orjson-3.10.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d36f689e7e1b9b6fb39dbdebc16a6f07cbe994d3644fb1c22953020fc575935f"},
+ {file = "orjson-3.10.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54433e421618cd5873e51c0e9d0b9fb35f7bf76eb31c8eab20b3595bb713cd3d"},
+ {file = "orjson-3.10.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e1ba0c5857dd743438acecc1cd0e1adf83f0a81fee558e32b2b36f89e40cee8b"},
+ {file = "orjson-3.10.13-cp312-cp312-win32.whl", hash = "sha256:a42b9fe4b0114b51eb5cdf9887d8c94447bc59df6dbb9c5884434eab947888d8"},
+ {file = "orjson-3.10.13-cp312-cp312-win_amd64.whl", hash = "sha256:3a7df63076435f39ec024bdfeb4c9767ebe7b49abc4949068d61cf4857fa6d6c"},
+ {file = "orjson-3.10.13-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2cdaf8b028a976ebab837a2c27b82810f7fc76ed9fb243755ba650cc83d07730"},
+ {file = "orjson-3.10.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48a946796e390cbb803e069472de37f192b7a80f4ac82e16d6eb9909d9e39d56"},
+ {file = "orjson-3.10.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d64f1db5ecbc21eb83097e5236d6ab7e86092c1cd4c216c02533332951afc"},
+ {file = "orjson-3.10.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:711878da48f89df194edd2ba603ad42e7afed74abcd2bac164685e7ec15f96de"},
+ {file = "orjson-3.10.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:cf16f06cb77ce8baf844bc222dbcb03838f61d0abda2c3341400c2b7604e436e"},
+ {file = "orjson-3.10.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8257c3fb8dd7b0b446b5e87bf85a28e4071ac50f8c04b6ce2d38cb4abd7dff57"},
+ {file = "orjson-3.10.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9c3a87abe6f849a4a7ac8a8a1dede6320a4303d5304006b90da7a3cd2b70d2c"},
+ {file = "orjson-3.10.13-cp313-cp313-win32.whl", hash = "sha256:527afb6ddb0fa3fe02f5d9fba4920d9d95da58917826a9be93e0242da8abe94a"},
+ {file = "orjson-3.10.13-cp313-cp313-win_amd64.whl", hash = "sha256:b5f7c298d4b935b222f52d6c7f2ba5eafb59d690d9a3840b7b5c5cda97f6ec5c"},
+ {file = "orjson-3.10.13-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e49333d1038bc03a25fdfe11c86360df9b890354bfe04215f1f54d030f33c342"},
+ {file = "orjson-3.10.13-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:003721c72930dbb973f25c5d8e68d0f023d6ed138b14830cc94e57c6805a2eab"},
+ {file = "orjson-3.10.13-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:63664bf12addb318dc8f032160e0f5dc17eb8471c93601e8f5e0d07f95003784"},
+ {file = "orjson-3.10.13-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6066729cf9552d70de297b56556d14b4f49c8f638803ee3c90fd212fa43cc6af"},
+ {file = "orjson-3.10.13-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a1152e2761025c5d13b5e1908d4b1c57f3797ba662e485ae6f26e4e0c466388"},
+ {file = "orjson-3.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b21d91c5c5ef8a201036d207b1adf3aa596b930b6ca3c71484dd11386cf6c3"},
+ {file = "orjson-3.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b12a63f48bb53dba8453d36ca2661f2330126d54e26c1661e550b32864b28ce3"},
+ {file = "orjson-3.10.13-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a5a7624ab4d121c7e035708c8dd1f99c15ff155b69a1c0affc4d9d8b551281ba"},
+ {file = "orjson-3.10.13-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:0fee076134398d4e6cb827002468679ad402b22269510cf228301b787fdff5ae"},
+ {file = "orjson-3.10.13-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ae537fcf330b3947e82c6ae4271e092e6cf16b9bc2cef68b14ffd0df1fa8832a"},
+ {file = "orjson-3.10.13-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f81b26c03f5fb5f0d0ee48d83cea4d7bc5e67e420d209cc1a990f5d1c62f9be0"},
+ {file = "orjson-3.10.13-cp38-cp38-win32.whl", hash = "sha256:0bc858086088b39dc622bc8219e73d3f246fb2bce70a6104abd04b3a080a66a8"},
+ {file = "orjson-3.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:3ca6f17467ebbd763f8862f1d89384a5051b461bb0e41074f583a0ebd7120e8e"},
+ {file = "orjson-3.10.13-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4a11532cbfc2f5752c37e84863ef8435b68b0e6d459b329933294f65fa4bda1a"},
+ {file = "orjson-3.10.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c96d2fb80467d1d0dfc4d037b4e1c0f84f1fe6229aa7fea3f070083acef7f3d7"},
+ {file = "orjson-3.10.13-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dda4ba4d3e6f6c53b6b9c35266788053b61656a716a7fef5c884629c2a52e7aa"},
+ {file = "orjson-3.10.13-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f998bbf300690be881772ee9c5281eb9c0044e295bcd4722504f5b5c6092ff"},
+ {file = "orjson-3.10.13-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1cc42ed75b585c0c4dc5eb53a90a34ccb493c09a10750d1a1f9b9eff2bd12"},
+ {file = "orjson-3.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03b0f29d485411e3c13d79604b740b14e4e5fb58811743f6f4f9693ee6480a8f"},
+ {file = "orjson-3.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:233aae4474078d82f425134bb6a10fb2b3fc5a1a1b3420c6463ddd1b6a97eda8"},
+ {file = "orjson-3.10.13-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e384e330a67cf52b3597ee2646de63407da6f8fc9e9beec3eaaaef5514c7a1c9"},
+ {file = "orjson-3.10.13-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4222881d0aab76224d7b003a8e5fdae4082e32c86768e0e8652de8afd6c4e2c1"},
+ {file = "orjson-3.10.13-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e400436950ba42110a20c50c80dff4946c8e3ec09abc1c9cf5473467e83fd1c5"},
+ {file = "orjson-3.10.13-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f47c9e7d224b86ffb086059cdcf634f4b3f32480f9838864aa09022fe2617ce2"},
+ {file = "orjson-3.10.13-cp39-cp39-win32.whl", hash = "sha256:a9ecea472f3eb653e1c0a3d68085f031f18fc501ea392b98dcca3e87c24f9ebe"},
+ {file = "orjson-3.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:5385935a73adce85cc7faac9d396683fd813566d3857fa95a0b521ef84a5b588"},
+ {file = "orjson-3.10.13.tar.gz", hash = "sha256:eb9bfb14ab8f68d9d9492d4817ae497788a15fd7da72e14dfabc289c3bb088ec"},
]
[[package]]
@@ -492,13 +522,13 @@ test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock
[[package]]
name = "pook"
-version = "2.0.0"
+version = "2.0.1"
description = "HTTP traffic mocking and expectations made easy"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pook-2.0.0-py3-none-any.whl", hash = "sha256:b3993cf00b8335f19b407fca39febd048c97749eb7c06eaddd9fbaff3b0a1ac3"},
- {file = "pook-2.0.0.tar.gz", hash = "sha256:b106ebc088417fa7b68d1f6ee21a9720fd171ea96d4b86ef308eaffac1e5c4f8"},
+ {file = "pook-2.0.1-py3-none-any.whl", hash = "sha256:30d73c95e0520f45c1e3889f3bf486e990b6f04b4915aa9daf86cf0d8136b2e1"},
+ {file = "pook-2.0.1.tar.gz", hash = "sha256:e04c0e698f256438b4dfbf3ab1b27559f0ec25e42176823167f321f4e8b9c9e4"},
]
[package.dependencies]
@@ -661,22 +691,22 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.4.2)", "diff-cover (>=8.0.3)",
[[package]]
name = "sphinx-rtd-theme"
-version = "2.0.0"
+version = "3.0.2"
description = "Read the Docs theme for Sphinx"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
files = [
- {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"},
- {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"},
+ {file = "sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13"},
+ {file = "sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85"},
]
[package.dependencies]
-docutils = "<0.21"
-sphinx = ">=5,<8"
+docutils = ">0.18,<0.22"
+sphinx = ">=6,<9"
sphinxcontrib-jquery = ">=4,<5"
[package.extras]
-dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"]
+dev = ["bump2version", "transifex-client", "twine", "wheel"]
[[package]]
name = "sphinxcontrib-applehelp"
@@ -805,13 +835,13 @@ files = [
[[package]]
name = "types-setuptools"
-version = "70.3.0.20240710"
+version = "75.6.0.20241223"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.8"
files = [
- {file = "types-setuptools-70.3.0.20240710.tar.gz", hash = "sha256:842cbf399812d2b65042c9d6ff35113bbf282dee38794779aa1f94e597bafc35"},
- {file = "types_setuptools-70.3.0.20240710-py3-none-any.whl", hash = "sha256:bd0db2a4b9f2c49ac5564be4e0fb3125c4c46b1f73eafdcbceffa5b005cceca4"},
+ {file = "types_setuptools-75.6.0.20241223-py3-none-any.whl", hash = "sha256:7cbfd3bf2944f88bbcdd321b86ddd878232a277be95d44c78a53585d78ebc2f6"},
+ {file = "types_setuptools-75.6.0.20241223.tar.gz", hash = "sha256:d9478a985057ed48a994c707f548e55aababa85fe1c9b212f43ab5a1fffd3211"},
]
[[package]]
@@ -827,24 +857,24 @@ files = [
[[package]]
name = "typing-extensions"
-version = "4.4.0"
-description = "Backported and Experimental Type Hints for Python 3.7+"
+version = "4.12.2"
+description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
- {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "urllib3"
-version = "2.2.2"
+version = "2.2.3"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
- {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
- {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
+ {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
]
[package.extras]
@@ -855,83 +885,97 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "websockets"
-version = "12.0"
+version = "13.1"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
optional = false
python-versions = ">=3.8"
files = [
- {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"},
- {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"},
- {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"},
- {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"},
- {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"},
- {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"},
- {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"},
- {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"},
- {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"},
- {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"},
- {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"},
- {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"},
- {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"},
- {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"},
- {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"},
- {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"},
- {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"},
- {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"},
- {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"},
- {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"},
- {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"},
- {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"},
- {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"},
- {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"},
- {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"},
- {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"},
- {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"},
- {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"},
- {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"},
- {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"},
- {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"},
- {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"},
- {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"},
- {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"},
- {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"},
- {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"},
- {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"},
- {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"},
- {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"},
- {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"},
- {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"},
- {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"},
- {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"},
- {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"},
- {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"},
- {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"},
- {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"},
- {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"},
- {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"},
- {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"},
- {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"},
- {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"},
- {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"},
- {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"},
- {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"},
- {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"},
- {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"},
- {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"},
- {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"},
- {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"},
- {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"},
- {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"},
- {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"},
- {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"},
- {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"},
- {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"},
- {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"},
- {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"},
- {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"},
- {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"},
- {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"},
- {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
+ {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"},
+ {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"},
+ {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"},
+ {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"},
+ {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"},
+ {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"},
+ {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"},
+ {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"},
+ {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"},
+ {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"},
+ {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"},
+ {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"},
+ {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"},
+ {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"},
+ {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"},
+ {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"},
+ {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"},
+ {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"},
+ {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"},
+ {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"},
+ {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"},
+ {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"},
+ {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"},
+ {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"},
+ {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"},
+ {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"},
+ {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"},
+ {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"},
+ {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"},
+ {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"},
+ {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"},
+ {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"},
+ {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"},
+ {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"},
+ {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"},
+ {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"},
+ {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"},
+ {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"},
+ {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"},
+ {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"},
+ {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"},
+ {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"},
+ {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"},
+ {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"},
+ {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"},
+ {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"},
+ {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"},
+ {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"},
+ {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"},
+ {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"},
+ {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"},
+ {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"},
+ {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"},
+ {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"},
+ {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"},
+ {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"},
+ {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"},
+ {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"},
+ {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"},
+ {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"},
+ {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"},
+ {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"},
+ {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"},
+ {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"},
+ {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"},
+ {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"},
+ {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"},
+ {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"},
+ {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"},
+ {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"},
+ {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"},
+ {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"},
+ {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"},
+ {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"},
+ {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"},
+ {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"},
+ {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"},
+ {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"},
+ {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"},
+ {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"},
+ {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"},
+ {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"},
+ {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"},
+ {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"},
+ {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"},
+ {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"},
]
[[package]]
@@ -963,4 +1007,4 @@ test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-it
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
-content-hash = "e3fa63991d39989af47ef9380ec497c17c30d57bcf4a6c59d8e2146d22d27828"
+content-hash = "183ba57707ae02b4b27fc68f294181edf9f15d33bd3c24dcfb58c9fdc4e3e93a"
diff --git a/polygon/rest/models/dividends.py b/polygon/rest/models/dividends.py
index ef9adff0..68db98a9 100644
--- a/polygon/rest/models/dividends.py
+++ b/polygon/rest/models/dividends.py
@@ -5,6 +5,7 @@
@modelclass
class Dividend:
"Dividend contains data for a historical cash dividend, including the ticker symbol, declaration date, ex-dividend date, record date, pay date, frequency, and amount."
+ id: Optional[str] = None
cash_amount: Optional[float] = None
currency: Optional[str] = None
declaration_date: Optional[str] = None
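The dividend id is an opaque string identifier assigned by the API, which is why it is typed as a string above. A minimal construction sketch with purely illustrative values:

    from polygon.rest.models.dividends import Dividend

    dividend = Dividend(
        id="E00000000000000000000000000000000000000000000000000000000000000",  # hypothetical identifier
        cash_amount=0.25,
        currency="USD",
        declaration_date="2024-01-15",
    )
    print(dividend.id, dividend.cash_amount)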
diff --git a/polygon/rest/models/financials.py b/polygon/rest/models/financials.py
index 1a480c48..5443e4f6 100644
--- a/polygon/rest/models/financials.py
+++ b/polygon/rest/models/financials.py
@@ -1,328 +1,503 @@
-from typing import Optional, Dict
+from dataclasses import dataclass
+from typing import Any, Dict, List, Optional
from ...modelclass import modelclass
@modelclass
+@dataclass
class DataPoint:
- "An individual financial data point."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
+ """Represents a single numeric or textual data point in the financials."""
- @staticmethod
- def from_dict(d):
- return DataPoint(**d)
-
-
-@modelclass
-class ExchangeGainsLosses:
- "Contains exchange gains losses data for a cash flow statement."
- formula: Optional[str] = None
label: Optional[str] = None
order: Optional[int] = None
unit: Optional[str] = None
value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return ExchangeGainsLosses(**d)
-
-
-@modelclass
-class NetCashFlow:
- "Contains net cash flow data for a cash flow statement."
+ derived_from: Optional[List[str]] = None
formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
+ source: Optional[Dict[str, str]] = None
xpath: Optional[str] = None
@staticmethod
- def from_dict(d):
- return NetCashFlow(**d)
+ def from_dict(d: Optional[Dict[str, Any]]) -> "DataPoint":
+ if not d:
+ return DataPoint()
+ return DataPoint(
+ label=d.get("label"),
+ order=d.get("order"),
+ unit=d.get("unit"),
+ value=d.get("value"),
+ derived_from=d.get("derived_from"),
+ formula=d.get("formula"),
+ source=d.get("source"),
+ xpath=d.get("xpath"),
+ )
+@dataclass
@modelclass
-class NetCashFlowFromFinancingActivities:
- "Contains net cash flow from financing activities data for a cash flow statement."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
+class BalanceSheet:
+ assets: Optional[DataPoint] = None
+ current_assets: Optional[DataPoint] = None
+ cash: Optional[DataPoint] = None
+ accounts_receivable: Optional[DataPoint] = None
+ inventory: Optional[DataPoint] = None
+ prepaid_expenses: Optional[DataPoint] = None
+ other_current_assets: Optional[DataPoint] = None
+ noncurrent_assets: Optional[DataPoint] = None
+ long_term_investments: Optional[DataPoint] = None
+ fixed_assets: Optional[DataPoint] = None
+ intangible_assets: Optional[DataPoint] = None
+ noncurrent_prepaid_expense: Optional[DataPoint] = None
+ other_noncurrent_assets: Optional[DataPoint] = None
+ liabilities: Optional[DataPoint] = None
+ current_liabilities: Optional[DataPoint] = None
+ accounts_payable: Optional[DataPoint] = None
+ interest_payable: Optional[DataPoint] = None
+ wages: Optional[DataPoint] = None
+ other_current_liabilities: Optional[DataPoint] = None
+ noncurrent_liabilities: Optional[DataPoint] = None
+ long_term_debt: Optional[DataPoint] = None
+ other_noncurrent_liabilities: Optional[DataPoint] = None
+ commitments_and_contingencies: Optional[DataPoint] = None
+ redeemable_noncontrolling_interest: Optional[DataPoint] = None
+ redeemable_noncontrolling_interest_common: Optional[DataPoint] = None
+ redeemable_noncontrolling_interest_other: Optional[DataPoint] = None
+ redeemable_noncontrolling_interest_preferred: Optional[DataPoint] = None
+ equity: Optional[DataPoint] = None
+ equity_attributable_to_noncontrolling_interest: Optional[DataPoint] = None
+ equity_attributable_to_parent: Optional[DataPoint] = None
+ temporary_equity: Optional[DataPoint] = None
+ temporary_equity_attributable_to_parent: Optional[DataPoint] = None
+ liabilities_and_equity: Optional[DataPoint] = None
@staticmethod
- def from_dict(d):
- return NetCashFlowFromFinancingActivities(**d)
+ def from_dict(d: Optional[Dict[str, Any]]) -> "BalanceSheet":
+ if not d:
+ return BalanceSheet()
+ return BalanceSheet(
+ assets=DataPoint.from_dict(d.get("assets")),
+ current_assets=DataPoint.from_dict(d.get("current_assets")),
+ cash=DataPoint.from_dict(d.get("cash")),
+ accounts_receivable=DataPoint.from_dict(d.get("accounts_receivable")),
+ inventory=DataPoint.from_dict(d.get("inventory")),
+ prepaid_expenses=DataPoint.from_dict(d.get("prepaid_expenses")),
+ other_current_assets=DataPoint.from_dict(d.get("other_current_assets")),
+ noncurrent_assets=DataPoint.from_dict(d.get("noncurrent_assets")),
+ long_term_investments=DataPoint.from_dict(d.get("long_term_investments")),
+ fixed_assets=DataPoint.from_dict(d.get("fixed_assets")),
+ intangible_assets=DataPoint.from_dict(d.get("intangible_assets")),
+ noncurrent_prepaid_expense=DataPoint.from_dict(
+ d.get("noncurrent_prepaid_expense")
+ ),
+ other_noncurrent_assets=DataPoint.from_dict(
+ d.get("other_noncurrent_assets")
+ ),
+ liabilities=DataPoint.from_dict(d.get("liabilities")),
+ current_liabilities=DataPoint.from_dict(d.get("current_liabilities")),
+ accounts_payable=DataPoint.from_dict(d.get("accounts_payable")),
+ interest_payable=DataPoint.from_dict(d.get("interest_payable")),
+ wages=DataPoint.from_dict(d.get("wages")),
+ other_current_liabilities=DataPoint.from_dict(
+ d.get("other_current_liabilities")
+ ),
+ noncurrent_liabilities=DataPoint.from_dict(d.get("noncurrent_liabilities")),
+ long_term_debt=DataPoint.from_dict(d.get("long_term_debt")),
+ other_noncurrent_liabilities=DataPoint.from_dict(
+ d.get("other_noncurrent_liabilities")
+ ),
+ commitments_and_contingencies=DataPoint.from_dict(
+ d.get("commitments_and_contingencies")
+ ),
+ redeemable_noncontrolling_interest=DataPoint.from_dict(
+ d.get("redeemable_noncontrolling_interest")
+ ),
+ redeemable_noncontrolling_interest_common=DataPoint.from_dict(
+ d.get("redeemable_noncontrolling_interest_common")
+ ),
+ redeemable_noncontrolling_interest_other=DataPoint.from_dict(
+ d.get("redeemable_noncontrolling_interest_other")
+ ),
+ redeemable_noncontrolling_interest_preferred=DataPoint.from_dict(
+ d.get("redeemable_noncontrolling_interest_preferred")
+ ),
+ equity=DataPoint.from_dict(d.get("equity")),
+ equity_attributable_to_noncontrolling_interest=DataPoint.from_dict(
+ d.get("equity_attributable_to_noncontrolling_interest")
+ ),
+ equity_attributable_to_parent=DataPoint.from_dict(
+ d.get("equity_attributable_to_parent")
+ ),
+ temporary_equity=DataPoint.from_dict(d.get("temporary_equity")),
+ temporary_equity_attributable_to_parent=DataPoint.from_dict(
+ d.get("temporary_equity_attributable_to_parent")
+ ),
+ liabilities_and_equity=DataPoint.from_dict(d.get("liabilities_and_equity")),
+ )
+@dataclass
@modelclass
class CashFlowStatement:
- "Contains cash flow statement data."
- exchange_gains_losses: Optional[ExchangeGainsLosses] = None
- net_cash_flow: Optional[NetCashFlow] = None
- net_cash_flow_from_financing_activities: Optional[
- NetCashFlowFromFinancingActivities
- ] = None
+ net_cash_flow_from_operating_activities: Optional[DataPoint] = None
+ net_cash_flow_from_operating_activities_continuing: Optional[DataPoint] = None
+ net_cash_flow_from_operating_activities_discontinued: Optional[DataPoint] = None
+ net_cash_flow_from_investing_activities: Optional[DataPoint] = None
+ net_cash_flow_from_investing_activities_continuing: Optional[DataPoint] = None
+ net_cash_flow_from_investing_activities_discontinued: Optional[DataPoint] = None
+ net_cash_flow_from_financing_activities: Optional[DataPoint] = None
+ net_cash_flow_from_financing_activities_continuing: Optional[DataPoint] = None
+ net_cash_flow_from_financing_activities_discontinued: Optional[DataPoint] = None
+ exchange_gains_losses: Optional[DataPoint] = None
+ net_cash_flow: Optional[DataPoint] = None
+ net_cash_flow_continuing: Optional[DataPoint] = None
+ net_cash_flow_discontinued: Optional[DataPoint] = None
@staticmethod
- def from_dict(d):
+ def from_dict(d: Optional[Dict[str, Any]]) -> "CashFlowStatement":
+ if not d:
+ return CashFlowStatement()
return CashFlowStatement(
- exchange_gains_losses=(
- None
- if "exchange_gains_losses" not in d
- else ExchangeGainsLosses.from_dict(d["exchange_gains_losses"])
- ),
- net_cash_flow=(
- None
- if "net_cash_flow" not in d
- else NetCashFlow.from_dict(d["net_cash_flow"])
- ),
- net_cash_flow_from_financing_activities=(
- None
- if "net_cash_flow_from_financing_activities" not in d
- else NetCashFlowFromFinancingActivities.from_dict(
- d["net_cash_flow_from_financing_activities"]
- )
+ net_cash_flow_from_operating_activities=DataPoint.from_dict(
+ d.get("net_cash_flow_from_operating_activities")
+ ),
+ net_cash_flow_from_operating_activities_continuing=DataPoint.from_dict(
+ d.get("net_cash_flow_from_operating_activities_continuing")
+ ),
+ net_cash_flow_from_operating_activities_discontinued=DataPoint.from_dict(
+ d.get("net_cash_flow_from_operating_activities_discontinued")
+ ),
+ net_cash_flow_from_investing_activities=DataPoint.from_dict(
+ d.get("net_cash_flow_from_investing_activities")
+ ),
+ net_cash_flow_from_investing_activities_continuing=DataPoint.from_dict(
+ d.get("net_cash_flow_from_investing_activities_continuing")
+ ),
+ net_cash_flow_from_investing_activities_discontinued=DataPoint.from_dict(
+ d.get("net_cash_flow_from_investing_activities_discontinued")
+ ),
+ net_cash_flow_from_financing_activities=DataPoint.from_dict(
+ d.get("net_cash_flow_from_financing_activities")
+ ),
+ net_cash_flow_from_financing_activities_continuing=DataPoint.from_dict(
+ d.get("net_cash_flow_from_financing_activities_continuing")
+ ),
+ net_cash_flow_from_financing_activities_discontinued=DataPoint.from_dict(
+ d.get("net_cash_flow_from_financing_activities_discontinued")
+ ),
+ exchange_gains_losses=DataPoint.from_dict(d.get("exchange_gains_losses")),
+ net_cash_flow=DataPoint.from_dict(d.get("net_cash_flow")),
+ net_cash_flow_continuing=DataPoint.from_dict(
+ d.get("net_cash_flow_continuing")
+ ),
+ net_cash_flow_discontinued=DataPoint.from_dict(
+ d.get("net_cash_flow_discontinued")
),
)
-@modelclass
-class ComprehensiveIncomeLoss:
- "Contains comprehensive income loss data for comprehensive income."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return ComprehensiveIncomeLoss(**d)
-
-
-@modelclass
-class ComprehensiveIncomeLossAttributableToParent:
- "Contains comprehensive income loss attributable to parent data for comprehensive income."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return ComprehensiveIncomeLossAttributableToParent(**d)
-
-
-@modelclass
-class OtherComprehensiveIncomeLoss:
- "Contains other comprehensive income loss data for comprehensive income."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return OtherComprehensiveIncomeLoss(**d)
-
-
+@dataclass
@modelclass
class ComprehensiveIncome:
- "Contains comprehensive income data."
- comprehensive_income_loss: Optional[ComprehensiveIncomeLoss] = None
- comprehensive_income_loss_attributable_to_parent: Optional[
- ComprehensiveIncomeLossAttributableToParent
+ comprehensive_income_loss: Optional[DataPoint] = None
+ comprehensive_income_loss_attributable_to_noncontrolling_interest: Optional[
+ DataPoint
+ ] = None
+ comprehensive_income_loss_attributable_to_parent: Optional[DataPoint] = None
+ other_comprehensive_income_loss: Optional[DataPoint] = None
+ other_comprehensive_income_loss_attributable_to_noncontrolling_interest: Optional[
+ DataPoint
] = None
- other_comprehensive_income_loss: Optional[OtherComprehensiveIncomeLoss] = None
+ other_comprehensive_income_loss_attributable_to_parent: Optional[DataPoint] = None
@staticmethod
- def from_dict(d):
+ def from_dict(d: Optional[Dict[str, Any]]) -> "ComprehensiveIncome":
+ if not d:
+ return ComprehensiveIncome()
return ComprehensiveIncome(
- comprehensive_income_loss=(
- None
- if "comprehensive_income_loss" not in d
- else ComprehensiveIncomeLoss.from_dict(d["comprehensive_income_loss"])
- ),
- comprehensive_income_loss_attributable_to_parent=(
- None
- if "comprehensive_income_loss_attributable_to_parent" not in d
- else ComprehensiveIncomeLossAttributableToParent.from_dict(
- d["comprehensive_income_loss_attributable_to_parent"]
+ comprehensive_income_loss=DataPoint.from_dict(
+ d.get("comprehensive_income_loss")
+ ),
+ comprehensive_income_loss_attributable_to_noncontrolling_interest=DataPoint.from_dict(
+ d.get(
+ "comprehensive_income_loss_attributable_to_noncontrolling_interest"
)
),
- other_comprehensive_income_loss=(
- None
- if "other_comprehensive_income_loss" not in d
- else OtherComprehensiveIncomeLoss.from_dict(
- d["other_comprehensive_income_loss"]
+ comprehensive_income_loss_attributable_to_parent=DataPoint.from_dict(
+ d.get("comprehensive_income_loss_attributable_to_parent")
+ ),
+ other_comprehensive_income_loss=DataPoint.from_dict(
+ d.get("other_comprehensive_income_loss")
+ ),
+ other_comprehensive_income_loss_attributable_to_noncontrolling_interest=DataPoint.from_dict(
+ d.get(
+ "other_comprehensive_income_loss_attributable_to_noncontrolling_interest"
)
),
+ other_comprehensive_income_loss_attributable_to_parent=DataPoint.from_dict(
+ d.get("other_comprehensive_income_loss_attributable_to_parent")
+ ),
)
-@modelclass
-class BasicEarningsPerShare:
- "Contains basic earning per share data for an income statement."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return BasicEarningsPerShare(**d)
-
-
-@modelclass
-class CostOfRevenue:
- "Contains cost of revenue data for an income statement."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return CostOfRevenue(**d)
-
-
-@modelclass
-class GrossProfit:
- "Contains gross profit data for an income statement."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return GrossProfit(**d)
-
-
-@modelclass
-class OperatingExpenses:
- "Contains operating expenses data for an income statement."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return OperatingExpenses(**d)
-
-
-@modelclass
-class Revenues:
- "Contains revenues data for an income statement."
- formula: Optional[str] = None
- label: Optional[str] = None
- order: Optional[int] = None
- unit: Optional[str] = None
- value: Optional[float] = None
- xpath: Optional[str] = None
-
- @staticmethod
- def from_dict(d):
- return Revenues(**d)
-
-
+@dataclass
@modelclass
class IncomeStatement:
- "Contains income statement data."
- basic_earnings_per_share: Optional[BasicEarningsPerShare] = None
- cost_of_revenue: Optional[CostOfRevenue] = None
- gross_profit: Optional[GrossProfit] = None
- operating_expenses: Optional[OperatingExpenses] = None
- revenues: Optional[Revenues] = None
+ revenues: Optional[DataPoint] = None
+ benefits_costs_expenses: Optional[DataPoint] = None
+ cost_of_revenue: Optional[DataPoint] = None
+ cost_of_revenue_goods: Optional[DataPoint] = None
+ cost_of_revenue_services: Optional[DataPoint] = None
+ costs_and_expenses: Optional[DataPoint] = None
+ gross_profit: Optional[DataPoint] = None
+ gain_loss_on_sale_properties_net_tax: Optional[DataPoint] = None
+ nonoperating_income_loss: Optional[DataPoint] = None
+ operating_expenses: Optional[DataPoint] = None
+ selling_general_and_administrative_expenses: Optional[DataPoint] = None
+ depreciation_and_amortization: Optional[DataPoint] = None
+ research_and_development: Optional[DataPoint] = None
+ other_operating_expenses: Optional[DataPoint] = None
+ operating_income_loss: Optional[DataPoint] = None
+ other_operating_income_expenses: Optional[DataPoint] = None
+ income_loss_before_equity_method_investments: Optional[DataPoint] = None
+ income_loss_from_continuing_operations_after_tax: Optional[DataPoint] = None
+ income_loss_from_continuing_operations_before_tax: Optional[DataPoint] = None
+ income_loss_from_discontinued_operations_net_of_tax: Optional[DataPoint] = None
+ income_loss_from_discontinued_operations_net_of_tax_adjustment_to_prior_year_gain_loss_on_disposal: Optional[
+ DataPoint
+ ] = None
+ income_loss_from_discontinued_operations_net_of_tax_during_phase_out: Optional[
+ DataPoint
+ ] = None
+ income_loss_from_discontinued_operations_net_of_tax_gain_loss_on_disposal: Optional[
+ DataPoint
+ ] = None
+ income_loss_from_discontinued_operations_net_of_tax_provision_for_gain_loss_on_disposal: Optional[
+ DataPoint
+ ] = None
+ income_loss_from_equity_method_investments: Optional[DataPoint] = None
+ income_tax_expense_benefit: Optional[DataPoint] = None
+ income_tax_expense_benefit_current: Optional[DataPoint] = None
+ income_tax_expense_benefit_deferred: Optional[DataPoint] = None
+ interest_and_debt_expense: Optional[DataPoint] = None
+ interest_and_dividend_income_operating: Optional[DataPoint] = None
+ interest_expense_operating: Optional[DataPoint] = None
+ interest_income_expense_after_provision_for_losses: Optional[DataPoint] = None
+ interest_income_expense_operating_net: Optional[DataPoint] = None
+ noninterest_expense: Optional[DataPoint] = None
+ noninterest_income: Optional[DataPoint] = None
+ provision_for_loan_lease_and_other_losses: Optional[DataPoint] = None
+ net_income_loss: Optional[DataPoint] = None
+ net_income_loss_attributable_to_noncontrolling_interest: Optional[DataPoint] = None
+ net_income_loss_attributable_to_nonredeemable_noncontrolling_interest: Optional[
+ DataPoint
+ ] = None
+ net_income_loss_attributable_to_parent: Optional[DataPoint] = None
+ net_income_loss_attributable_to_redeemable_noncontrolling_interest: Optional[
+ DataPoint
+ ] = None
+ net_income_loss_available_to_common_stockholders_basic: Optional[DataPoint] = None
+ participating_securities_distributed_and_undistributed_earnings_loss_basic: (
+ Optional[DataPoint]
+ ) = (None)
+ undistributed_earnings_loss_allocated_to_participating_securities_basic: Optional[
+ DataPoint
+ ] = None
+ preferred_stock_dividends_and_other_adjustments: Optional[DataPoint] = None
+ basic_earnings_per_share: Optional[DataPoint] = None
+ diluted_earnings_per_share: Optional[DataPoint] = None
+ basic_average_shares: Optional[DataPoint] = None
+ diluted_average_shares: Optional[DataPoint] = None
+ common_stock_dividends: Optional[DataPoint] = None
@staticmethod
- def from_dict(d):
+ def from_dict(d: Optional[Dict[str, Any]]) -> "IncomeStatement":
+ if not d:
+ return IncomeStatement()
return IncomeStatement(
- basic_earnings_per_share=(
- None
- if "basic_earnings_per_share" not in d
- else BasicEarningsPerShare.from_dict(d["basic_earnings_per_share"])
- ),
- cost_of_revenue=(
- None
- if "cost_of_revenue" not in d
- else CostOfRevenue.from_dict(d["cost_of_revenue"])
- ),
- gross_profit=(
- None
- if "gross_profit" not in d
- else GrossProfit.from_dict(d["gross_profit"])
- ),
- operating_expenses=(
- None
- if "operating_expenses" not in d
- else OperatingExpenses.from_dict(d["operating_expenses"])
- ),
- revenues=None if "revenues" not in d else Revenues.from_dict(d["revenues"]),
+ revenues=DataPoint.from_dict(d.get("revenues")),
+ benefits_costs_expenses=DataPoint.from_dict(
+ d.get("benefits_costs_expenses")
+ ),
+ cost_of_revenue=DataPoint.from_dict(d.get("cost_of_revenue")),
+ cost_of_revenue_goods=DataPoint.from_dict(d.get("cost_of_revenue_goods")),
+ cost_of_revenue_services=DataPoint.from_dict(
+ d.get("cost_of_revenue_services")
+ ),
+ costs_and_expenses=DataPoint.from_dict(d.get("costs_and_expenses")),
+ gross_profit=DataPoint.from_dict(d.get("gross_profit")),
+ gain_loss_on_sale_properties_net_tax=DataPoint.from_dict(
+ d.get("gain_loss_on_sale_properties_net_tax")
+ ),
+ nonoperating_income_loss=DataPoint.from_dict(
+ d.get("nonoperating_income_loss")
+ ),
+ operating_expenses=DataPoint.from_dict(d.get("operating_expenses")),
+ selling_general_and_administrative_expenses=DataPoint.from_dict(
+ d.get("selling_general_and_administrative_expenses")
+ ),
+ depreciation_and_amortization=DataPoint.from_dict(
+ d.get("depreciation_and_amortization")
+ ),
+ research_and_development=DataPoint.from_dict(
+ d.get("research_and_development")
+ ),
+ other_operating_expenses=DataPoint.from_dict(
+ d.get("other_operating_expenses")
+ ),
+ operating_income_loss=DataPoint.from_dict(d.get("operating_income_loss")),
+ other_operating_income_expenses=DataPoint.from_dict(
+ d.get("other_operating_income_expenses")
+ ),
+ income_loss_before_equity_method_investments=DataPoint.from_dict(
+ d.get("income_loss_before_equity_method_investments")
+ ),
+ income_loss_from_continuing_operations_after_tax=DataPoint.from_dict(
+ d.get("income_loss_from_continuing_operations_after_tax")
+ ),
+ income_loss_from_continuing_operations_before_tax=DataPoint.from_dict(
+ d.get("income_loss_from_continuing_operations_before_tax")
+ ),
+ income_loss_from_discontinued_operations_net_of_tax=DataPoint.from_dict(
+ d.get("income_loss_from_discontinued_operations_net_of_tax")
+ ),
+ income_loss_from_discontinued_operations_net_of_tax_adjustment_to_prior_year_gain_loss_on_disposal=DataPoint.from_dict(
+ d.get(
+ "income_loss_from_discontinued_operations_net_of_tax_adjustment_to_prior_year_gain_loss_on_disposal"
+ )
+ ),
+ income_loss_from_discontinued_operations_net_of_tax_during_phase_out=DataPoint.from_dict(
+ d.get(
+ "income_loss_from_discontinued_operations_net_of_tax_during_phase_out"
+ )
+ ),
+ income_loss_from_discontinued_operations_net_of_tax_gain_loss_on_disposal=DataPoint.from_dict(
+ d.get(
+ "income_loss_from_discontinued_operations_net_of_tax_gain_loss_on_disposal"
+ )
+ ),
+ income_loss_from_discontinued_operations_net_of_tax_provision_for_gain_loss_on_disposal=DataPoint.from_dict(
+ d.get(
+ "income_loss_from_discontinued_operations_net_of_tax_provision_for_gain_loss_on_disposal"
+ )
+ ),
+ income_loss_from_equity_method_investments=DataPoint.from_dict(
+ d.get("income_loss_from_equity_method_investments")
+ ),
+ income_tax_expense_benefit=DataPoint.from_dict(
+ d.get("income_tax_expense_benefit")
+ ),
+ income_tax_expense_benefit_current=DataPoint.from_dict(
+ d.get("income_tax_expense_benefit_current")
+ ),
+ income_tax_expense_benefit_deferred=DataPoint.from_dict(
+ d.get("income_tax_expense_benefit_deferred")
+ ),
+ interest_and_debt_expense=DataPoint.from_dict(
+ d.get("interest_and_debt_expense")
+ ),
+ interest_and_dividend_income_operating=DataPoint.from_dict(
+ d.get("interest_and_dividend_income_operating")
+ ),
+ interest_expense_operating=DataPoint.from_dict(
+ d.get("interest_expense_operating")
+ ),
+ interest_income_expense_after_provision_for_losses=DataPoint.from_dict(
+ d.get("interest_income_expense_after_provision_for_losses")
+ ),
+ interest_income_expense_operating_net=DataPoint.from_dict(
+ d.get("interest_income_expense_operating_net")
+ ),
+ noninterest_expense=DataPoint.from_dict(d.get("noninterest_expense")),
+ noninterest_income=DataPoint.from_dict(d.get("noninterest_income")),
+ provision_for_loan_lease_and_other_losses=DataPoint.from_dict(
+ d.get("provision_for_loan_lease_and_other_losses")
+ ),
+ net_income_loss=DataPoint.from_dict(d.get("net_income_loss")),
+ net_income_loss_attributable_to_noncontrolling_interest=DataPoint.from_dict(
+ d.get("net_income_loss_attributable_to_noncontrolling_interest")
+ ),
+ net_income_loss_attributable_to_nonredeemable_noncontrolling_interest=DataPoint.from_dict(
+ d.get(
+ "net_income_loss_attributable_to_nonredeemable_noncontrolling_interest"
+ )
+ ),
+ net_income_loss_attributable_to_parent=DataPoint.from_dict(
+ d.get("net_income_loss_attributable_to_parent")
+ ),
+ net_income_loss_attributable_to_redeemable_noncontrolling_interest=DataPoint.from_dict(
+ d.get(
+ "net_income_loss_attributable_to_redeemable_noncontrolling_interest"
+ )
+ ),
+ net_income_loss_available_to_common_stockholders_basic=DataPoint.from_dict(
+ d.get("net_income_loss_available_to_common_stockholders_basic")
+ ),
+ participating_securities_distributed_and_undistributed_earnings_loss_basic=DataPoint.from_dict(
+ d.get(
+ "participating_securities_distributed_and_undistributed_earnings_loss_basic"
+ )
+ ),
+ undistributed_earnings_loss_allocated_to_participating_securities_basic=DataPoint.from_dict(
+ d.get(
+ "undistributed_earnings_loss_allocated_to_participating_securities_basic"
+ )
+ ),
+ preferred_stock_dividends_and_other_adjustments=DataPoint.from_dict(
+ d.get("preferred_stock_dividends_and_other_adjustments")
+ ),
+ basic_earnings_per_share=DataPoint.from_dict(
+ d.get("basic_earnings_per_share")
+ ),
+ diluted_earnings_per_share=DataPoint.from_dict(
+ d.get("diluted_earnings_per_share")
+ ),
+ basic_average_shares=DataPoint.from_dict(d.get("basic_average_shares")),
+ diluted_average_shares=DataPoint.from_dict(d.get("diluted_average_shares")),
+ common_stock_dividends=DataPoint.from_dict(d.get("common_stock_dividends")),
)
+@dataclass
@modelclass
class Financials:
- "Contains financial data."
- balance_sheet: Optional[Dict[str, DataPoint]] = None
+ """
+ Contains data for:
+ - balance_sheet (BalanceSheet)
+ - cash_flow_statement (CashFlowStatement)
+ - comprehensive_income (ComprehensiveIncome)
+ - income_statement (IncomeStatement)
+ """
+
+ balance_sheet: Optional[BalanceSheet] = None
cash_flow_statement: Optional[CashFlowStatement] = None
comprehensive_income: Optional[ComprehensiveIncome] = None
income_statement: Optional[IncomeStatement] = None
@staticmethod
- def from_dict(d):
+ def from_dict(d: Optional[Dict[str, Any]]) -> "Financials":
+ if not d:
+ return Financials()
return Financials(
- balance_sheet=(
- None
- if "balance_sheet" not in d
- else {
- k: DataPoint.from_dict(v) for (k, v) in d["balance_sheet"].items()
- }
- ),
- cash_flow_statement=(
- None
- if "cash_flow_statement" not in d
- else CashFlowStatement.from_dict(d["cash_flow_statement"])
- ),
- comprehensive_income=(
- None
- if "comprehensive_income" not in d
- else ComprehensiveIncome.from_dict(d["comprehensive_income"])
- ),
- income_statement=(
- None
- if "income_statement" not in d
- else IncomeStatement.from_dict(d["income_statement"])
+ balance_sheet=BalanceSheet.from_dict(d.get("balance_sheet")),
+ cash_flow_statement=CashFlowStatement.from_dict(
+ d.get("cash_flow_statement")
+ ),
+ comprehensive_income=ComprehensiveIncome.from_dict(
+ d.get("comprehensive_income")
),
+ income_statement=IncomeStatement.from_dict(d.get("income_statement")),
)
+@dataclass
@modelclass
class StockFinancial:
- "StockFinancial contains historical financial data for a stock ticker."
+ """
+ StockFinancial contains historical financial data for a stock ticker.
+ The 'financials' attribute references an instance of Financials
+ which has typed sub-statements.
+ """
+
cik: Optional[str] = None
company_name: Optional[str] = None
end_date: Optional[str] = None
@@ -335,18 +510,18 @@ class StockFinancial:
start_date: Optional[str] = None
@staticmethod
- def from_dict(d):
+ def from_dict(d: Optional[Dict[str, Any]]) -> "StockFinancial":
+ if not d:
+ return StockFinancial()
return StockFinancial(
- cik=d.get("cik", None),
- company_name=d.get("company_name", None),
- end_date=d.get("end_date", None),
- filing_date=d.get("filing_date", None),
- financials=(
- None if "financials" not in d else Financials.from_dict(d["financials"])
- ),
- fiscal_period=d.get("fiscal_period", None),
- fiscal_year=d.get("fiscal_year", None),
- source_filing_file_url=d.get("source_filing_file_url", None),
- source_filing_url=d.get("source_filing_url", None),
- start_date=d.get("start_date", None),
+ cik=d.get("cik"),
+ company_name=d.get("company_name"),
+ end_date=d.get("end_date"),
+ filing_date=d.get("filing_date"),
+ financials=Financials.from_dict(d.get("financials", {})),
+ fiscal_period=d.get("fiscal_period"),
+ fiscal_year=d.get("fiscal_year"),
+ source_filing_file_url=d.get("source_filing_file_url"),
+ source_filing_url=d.get("source_filing_url"),
+ start_date=d.get("start_date"),
)
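With the statements modeled as typed classes rather than free-form dicts, nested values can be reached through plain attribute access, and statements missing from the payload come back as empty model objects rather than None. A minimal sketch with illustrative numbers:

    from polygon.rest.models.financials import StockFinancial

    filing = StockFinancial.from_dict(
        {
            "fiscal_period": "Q1",
            "financials": {
                "balance_sheet": {
                    "assets": {"label": "Assets", "unit": "USD", "value": 2.1321e10, "order": 100}
                },
                "income_statement": {
                    "basic_earnings_per_share": {"unit": "USD / shares", "value": 2.5}
                },
            },
        }
    )
    assert filing.financials.balance_sheet.assets.value == 21321000000.0
    assert filing.financials.income_statement.basic_earnings_per_share.value == 2.5
    # The cash flow statement was absent from the input, so its fields stay None.
    assert filing.financials.cash_flow_statement.net_cash_flow is None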
diff --git a/polygon/rest/models/snapshot.py b/polygon/rest/models/snapshot.py
index ceb5f7f8..3d38abe2 100644
--- a/polygon/rest/models/snapshot.py
+++ b/polygon/rest/models/snapshot.py
@@ -308,6 +308,8 @@ class UniversalSnapshotSession:
change_percent: Optional[float] = None
early_trading_change: Optional[float] = None
early_trading_change_percent: Optional[float] = None
+ regular_trading_change: Optional[float] = None
+ regular_trading_change_percent: Optional[float] = None
late_trading_change: Optional[float] = None
late_trading_change_percent: Optional[float] = None
open: Optional[float] = None
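The new session fields carry the regular-hours move alongside the existing early/late trading deltas. A small sketch constructing the model directly with made-up numbers:

    from polygon.rest.models.snapshot import UniversalSnapshotSession

    session = UniversalSnapshotSession(
        change_percent=1.4,
        early_trading_change=0.10,
        early_trading_change_percent=0.05,
        regular_trading_change=2.55,
        regular_trading_change_percent=1.35,
    )
    print(session.regular_trading_change, session.regular_trading_change_percent)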
diff --git a/polygon/rest/models/splits.py b/polygon/rest/models/splits.py
index 93244c50..5fb27129 100644
--- a/polygon/rest/models/splits.py
+++ b/polygon/rest/models/splits.py
@@ -5,6 +5,7 @@
@modelclass
class Split:
"Split contains data for a historical stock split, including the ticker symbol, the execution date, and the factors of the split ratio."
+ id: Optional[str] = None
execution_date: Optional[str] = None
split_from: Optional[int] = None
split_to: Optional[int] = None
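As with dividends, the split id is an opaque string identifier. An illustrative construction using only the fields shown here:

    from polygon.rest.models.splits import Split

    split = Split(
        id="E11111111111111111111111111111111111111111111111111111111111111",  # hypothetical identifier
        execution_date="2023-07-01",
        split_from=1,
        split_to=3,
    )
    print(split.split_to / split.split_from)  # 3.0, i.e. a 3-for-1 split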
diff --git a/polygon/rest/models/tickers.py b/polygon/rest/models/tickers.py
index 2554927e..317275ed 100644
--- a/polygon/rest/models/tickers.py
+++ b/polygon/rest/models/tickers.py
@@ -253,3 +253,60 @@ class TickerChangeResults:
@staticmethod
def from_dict(d):
return TickerChangeResults(**d)
+
+
+@modelclass
+class IPOListing:
+ """
+ IPO Listing data as returned by the /vX/reference/ipos endpoint.
+ """
+
+ announced_date: Optional[str] = None
+ currency_code: Optional[str] = None
+ final_issue_price: Optional[float] = None
+ highest_offer_price: Optional[float] = None
+ ipo_status: Optional[str] = None
+ isin: Optional[str] = None
+ issuer_name: Optional[str] = None
+ last_updated: Optional[str] = None
+ listing_date: Optional[str] = None
+ lot_size: Optional[int] = None
+ lowest_offer_price: Optional[float] = None
+ max_shares_offered: Optional[int] = None
+ min_shares_offered: Optional[int] = None
+ primary_exchange: Optional[str] = None
+ security_description: Optional[str] = None
+ security_type: Optional[str] = None
+ shares_outstanding: Optional[int] = None
+ ticker: Optional[str] = None
+ total_offer_size: Optional[float] = None
+ us_code: Optional[str] = None
+
+ @staticmethod
+ def from_dict(d):
+ return IPOListing(
+ announced_date=d.get("announced_date"),
+ currency_code=d.get("currency_code"),
+ final_issue_price=d.get("final_issue_price"),
+ highest_offer_price=d.get("highest_offer_price"),
+ ipo_status=d.get("ipo_status"),
+ isin=d.get("isin"),
+ issuer_name=d.get("issuer_name"),
+ last_updated=d.get("last_updated"),
+ listing_date=d.get("listing_date"),
+ lot_size=d.get("lot_size"),
+ lowest_offer_price=d.get("lowest_offer_price"),
+ max_shares_offered=d.get("max_shares_offered"),
+ min_shares_offered=d.get("min_shares_offered"),
+ primary_exchange=d.get("primary_exchange"),
+ security_description=d.get("security_description"),
+ security_type=d.get("security_type"),
+ shares_outstanding=d.get("shares_outstanding"),
+ ticker=d.get("ticker"),
+ total_offer_size=d.get("total_offer_size"),
+ us_code=d.get("us_code"),
+ )
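IPOListing.from_dict reads each field with dict.get, so keys missing from the response simply stay None. A quick sketch with hypothetical values:

    from polygon.rest.models.tickers import IPOListing

    ipo = IPOListing.from_dict(
        {
            "ticker": "XMPL",  # hypothetical ticker
            "issuer_name": "Example Corp.",
            "ipo_status": "pending",
            "listing_date": "2025-01-15",
            "lot_size": 100,
        }
    )
    print(ipo.ticker, ipo.listing_date, ipo.final_issue_price)  # final_issue_price stays None here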
diff --git a/polygon/rest/vX.py b/polygon/rest/vX.py
index a7c13a2f..228134d4 100644
--- a/polygon/rest/vX.py
+++ b/polygon/rest/vX.py
@@ -1,6 +1,6 @@
from .base import BaseClient
from typing import Optional, Any, Dict, Union, Iterator
-from .models import StockFinancial, Timeframe, Sort, Order
+from .models import StockFinancial, IPOListing, Timeframe, Sort, Order
from urllib3 import HTTPResponse
from datetime import datetime, date
@@ -70,3 +70,44 @@ def list_stock_financials(
deserializer=StockFinancial.from_dict,
options=options,
)
+
+ def list_ipos(
+ self,
+ ticker: Optional[str] = None,
+ us_code: Optional[str] = None,
+ isin: Optional[str] = None,
+ listing_date: Optional[str] = None,
+ ipo_status: Optional[str] = None,
+ limit: Optional[int] = None,
+ sort: Optional[Union[str, Sort]] = None,
+ order: Optional[Union[str, Order]] = None,
+ params: Optional[Dict[str, Any]] = None,
+ raw: bool = False,
+ options: Optional[RequestOptionBuilder] = None,
+ ) -> Union[Iterator[IPOListing], HTTPResponse]:
+ """
+ Retrieve upcoming or historical IPOs.
+
+ :param ticker: Filter by a case-sensitive ticker symbol.
+ :param us_code: Filter by a US code (unique identifier for a North American financial security).
+ :param isin: Filter by an International Securities Identification Number (ISIN).
+ :param listing_date: Filter by the listing date (YYYY-MM-DD).
+ :param ipo_status: Filter by IPO status (e.g. "new", "pending", "history", etc.).
+ :param limit: Limit the number of results per page. Default 10, max 1000.
+ :param sort: Field to sort by. Default is "listing_date".
+ :param order: Order results based on the sort field ("asc" or "desc"). Default "desc".
+ :param params: Additional query params.
+ :param raw: Return the raw HTTPResponse object if True, otherwise return an iterator of IPOListing objects.
+ :param options: RequestOptionBuilder for additional headers or params.
+ :return: An iterator of IPOListing objects, or an HTTPResponse if raw=True.
+ """
+ url = "/vX/reference/ipos"
+
+ return self._paginate(
+ path=url,
+ params=self._get_params(self.list_ipos, locals()),
+ deserializer=IPOListing.from_dict,
+ raw=raw,
+ result_key="results",
+ options=options,
+ )
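A minimal usage sketch of the new helper (assumes a valid API key that is entitled to the IPOs endpoint; iterating the result lets the client fetch additional pages as needed):

    from polygon import RESTClient

    client = RESTClient("YOUR_API_KEY")  # hypothetical key
    for ipo in client.vx.list_ipos(ipo_status="pending", sort="listing_date", order="asc", limit=100):
        print(ipo.ticker, ipo.listing_date, ipo.ipo_status)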
diff --git a/pyproject.toml b/pyproject.toml
index dffc7ae4..83794997 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -26,21 +26,21 @@ packages = [
[tool.poetry.dependencies]
python = "^3.8"
urllib3 = ">=1.26.9,<3.0.0"
-websockets = ">=10.3,<13.0"
-certifi = ">=2022.5.18,<2025.0.0"
+websockets = ">=10.3,<15.0"
+certifi = ">=2022.5.18,<2026.0.0"
[tool.poetry.dev-dependencies]
-black = "^24.4.2"
-mypy = "^1.10"
+black = "^24.8.0"
+mypy = "^1.13"
types-urllib3 = "^1.26.25"
Sphinx = "^7.1.2"
-sphinx-rtd-theme = "^2.0.0"
+sphinx-rtd-theme = "^3.0.2"
# keep this in sync with docs/requirements.txt for readthedocs.org
sphinx-autodoc-typehints = "^2.0.1"
types-certifi = "^2021.10.8"
-types-setuptools = "^70.3.0"
-pook = "^2.0.0"
-orjson = "^3.10.6"
+types-setuptools = "^75.6.0"
+pook = "^2.0.1"
+orjson = "^3.10.13"
[build-system]
requires = ["poetry-core>=1.0.0"]
diff --git a/test_rest/mocks/vX/reference/financials.json b/test_rest/mocks/vX/reference/financials.json
index c5e18621..ae84513b 100644
--- a/test_rest/mocks/vX/reference/financials.json
+++ b/test_rest/mocks/vX/reference/financials.json
@@ -45,12 +45,6 @@
"unit": "USD",
"order": 400
},
- "other_than_fixed_noncurrent_assets": {
- "label": "Other Than Fixed Noncurrent Assets",
- "value": 1.6046e+10,
- "unit": "USD",
- "order": 500
- },
"noncurrent_liabilities": {
"label": "Noncurrent Liabilities",
"value": 1.1716e+10,
diff --git a/test_rest/test_financials.py b/test_rest/test_financials.py
deleted file mode 100644
index f5196212..00000000
--- a/test_rest/test_financials.py
+++ /dev/null
@@ -1,234 +0,0 @@
-from polygon.rest.models import (
- StockFinancial,
- Financials,
- DataPoint,
- CashFlowStatement,
- ExchangeGainsLosses,
- NetCashFlow,
- NetCashFlowFromFinancingActivities,
- ComprehensiveIncome,
- ComprehensiveIncomeLoss,
- ComprehensiveIncomeLossAttributableToParent,
- OtherComprehensiveIncomeLoss,
- IncomeStatement,
- BasicEarningsPerShare,
- CostOfRevenue,
- GrossProfit,
- OperatingExpenses,
- Revenues,
-)
-from base import BaseTest
-
-
-class FinancialsTest(BaseTest):
- def test_list_stock_financials(self):
- financials = [f for f in self.c.vx.list_stock_financials()]
- expected = [
- StockFinancial(
- cik="0001413447",
- company_name="NXP Semiconductors N.V.",
- end_date="2022-04-03",
- filing_date="2022-05-03",
- financials=Financials(
- balance_sheet={
- "equity_attributable_to_noncontrolling_interest": DataPoint(
- formula=None,
- label="Equity Attributable To Noncontrolling Interest",
- order=1500,
- unit="USD",
- value=251000000.0,
- xpath=None,
- ),
- "liabilities": DataPoint(
- formula=None,
- label="Liabilities",
- order=600,
- unit="USD",
- value=14561000000.0,
- xpath=None,
- ),
- "equity_attributable_to_parent": DataPoint(
- formula=None,
- label="Equity Attributable To Parent",
- order=1600,
- unit="USD",
- value=6509000000.0,
- xpath=None,
- ),
- "noncurrent_assets": DataPoint(
- formula=None,
- label="Noncurrent Assets",
- order=300,
- unit="USD",
- value=16046000000.0,
- xpath=None,
- ),
- "liabilities_and_equity": DataPoint(
- formula=None,
- label="Liabilities And Equity",
- order=1900,
- unit="USD",
- value=21321000000.0,
- xpath=None,
- ),
- "assets": DataPoint(
- formula=None,
- label="Assets",
- order=100,
- unit="USD",
- value=21321000000.0,
- xpath=None,
- ),
- "fixed_assets": DataPoint(
- formula=None,
- label="Fixed Assets",
- order=400,
- unit="USD",
- value=2814000000.0,
- xpath=None,
- ),
- "other_than_fixed_noncurrent_assets": DataPoint(
- formula=None,
- label="Other Than Fixed Noncurrent Assets",
- order=500,
- unit="USD",
- value=16046000000.0,
- xpath=None,
- ),
- "noncurrent_liabilities": DataPoint(
- formula=None,
- label="Noncurrent Liabilities",
- order=800,
- unit="USD",
- value=11716000000.0,
- xpath=None,
- ),
- "current_assets": DataPoint(
- formula=None,
- label="Current Assets",
- order=200,
- unit="USD",
- value=5275000000.0,
- xpath=None,
- ),
- "equity": DataPoint(
- formula=None,
- label="Equity",
- order=1400,
- unit="USD",
- value=6760000000.0,
- xpath=None,
- ),
- "current_liabilities": DataPoint(
- formula=None,
- label="Current Liabilities",
- order=700,
- unit="USD",
- value=2845000000.0,
- xpath=None,
- ),
- },
- cash_flow_statement=CashFlowStatement(
- exchange_gains_losses=ExchangeGainsLosses(
- formula=None,
- label="Exchange Gains/Losses",
- order=1000,
- unit="USD",
- value=0,
- xpath=None,
- ),
- net_cash_flow=NetCashFlow(
- formula=None,
- label="Net Cash Flow",
- order=1100,
- unit="USD",
- value=-147000000.0,
- xpath=None,
- ),
- net_cash_flow_from_financing_activities=NetCashFlowFromFinancingActivities(
- formula=None,
- label="Net Cash Flow From Financing Activities",
- order=700,
- unit="USD",
- value=-674000000.0,
- xpath=None,
- ),
- ),
- comprehensive_income=ComprehensiveIncome(
- comprehensive_income_loss=ComprehensiveIncomeLoss(
- formula=None,
- label="Comprehensive Income/Loss",
- order=100,
- unit="USD",
- value=644000000.0,
- xpath=None,
- ),
- comprehensive_income_loss_attributable_to_parent=ComprehensiveIncomeLossAttributableToParent(
- formula=None,
- label="Comprehensive Income/Loss Attributable To Parent",
- order=300,
- unit="USD",
- value=635000000.0,
- xpath=None,
- ),
- other_comprehensive_income_loss=OtherComprehensiveIncomeLoss(
- formula=None,
- label="Other Comprehensive Income/Loss",
- order=400,
- unit="USD",
- value=-22000000.0,
- xpath=None,
- ),
- ),
- income_statement=IncomeStatement(
- basic_earnings_per_share=BasicEarningsPerShare(
- formula=None,
- label="Basic Earnings Per Share",
- order=4200,
- unit="USD / shares",
- value=2.5,
- xpath=None,
- ),
- cost_of_revenue=CostOfRevenue(
- formula=None,
- label="Cost Of Revenue",
- order=300,
- unit="USD",
- value=1359000000.0,
- xpath=None,
- ),
- gross_profit=GrossProfit(
- formula=None,
- label="Gross Profit",
- order=800,
- unit="USD",
- value=1777000000.0,
- xpath=None,
- ),
- operating_expenses=OperatingExpenses(
- formula=None,
- label="Operating Expenses",
- order=1000,
- unit="USD",
- value=904000000.0,
- xpath=None,
- ),
- revenues=Revenues(
- formula=None,
- label="Revenues",
- order=100,
- unit="USD",
- value=3136000000.0,
- xpath=None,
- ),
- ),
- ),
- fiscal_period="Q1",
- fiscal_year="2022",
- source_filing_file_url="https://api.polygon.io/v1/reference/sec/filings/0001413447-22-000014/files/nxpi-20220403_htm.xml",
- source_filing_url="https://api.polygon.io/v1/reference/sec/filings/0001413447-22-000014",
- start_date="2022-01-01",
- )
- ]
-
- self.assertEqual(financials, expected)