Commit a363476

Author: clickingbuttons
copy rfc over (#114)
1 parent 1ab00a9 commit a363476

File tree: 11 files changed (+242, -4686 lines)

polygon/rest/__init__.py

Lines changed: 6 additions & 1 deletion

@@ -1 +1,6 @@
-from .client import RESTClient
+from .aggs import AggsClient
+from .trades import TradesClient
+
+class RESTClient(AggsClient, TradesClient):
+    pass
+
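RESTClient is now assembled by multiple inheritance: each endpoint group lives in its own mixin client (AggsClient here, TradesClient in polygon/rest/trades.py, which is part of the commit but not shown in this excerpt), and all of them share one BaseClient for the HTTP plumbing. A minimal, self-contained sketch of that composition pattern, with purely illustrative names:

    # Illustrative only; these classes stand in for BaseClient and the endpoint mixins.
    class _Base:
        def _get(self, path: str) -> str:
            return f"GET {path}"

    class _AggsMixin(_Base):
        def get_aggs(self) -> str:
            return self._get("/v2/aggs")

    class _TradesMixin(_Base):
        def get_trades(self) -> str:
            return self._get("/v3/trades")

    class _Client(_AggsMixin, _TradesMixin):
        # one public client exposes every endpoint group
        pass

    c = _Client()
    print(c.get_aggs())    # GET /v2/aggs
    print(c.get_trades())  # GET /v3/trades

Because both mixins derive from the same base, a single RESTClient instance carries one set of BaseClient state, so one connection pool and one API key serve every endpoint group.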

polygon/rest/aggs.py

Lines changed: 39 additions & 0 deletions

@@ -0,0 +1,39 @@
+from .base import BaseClient
+from typing import Optional, Any, Dict, List, Union
+from .models import Agg, Sort
+
+# https://polygon.io/docs/stocks
+class AggsClient(BaseClient):
+    def get_aggs(self,
+        ticker: str,
+        multiplier: int,
+        timespan: str,
+        # "from" is a keyword in python https://www.w3schools.com/python/python_ref_keywords.asp
+        from_: str,
+        to: str,
+        adjusted: Optional[bool] = None,
+        sort: Optional[Union[str, Sort]] = None,
+        limit: Optional[int] = None,
+        params: Optional[Dict[str, Any]] = None,
+        raw: bool = False
+    ) -> List[Agg]:
+        """
+        Get aggregate bars for a ticker over a given date range in custom time window sizes.
+
+        :param ticker: The ticker symbol.
+        :param multiplier: The size of the timespan multiplier.
+        :param timespan: The size of the time window.
+        :param from_: The start of the aggregate time window.
+        :param to: The end of the aggregate time window.
+        :param adjusted: Whether or not the results are adjusted for splits. By default, results are adjusted. Set this to false to get results that are NOT adjusted for splits.
+        :param sort: Sort the results by timestamp. asc will return results in ascending order (oldest at the top), desc will return results in descending order (newest at the top).
+        :param limit: Limits the number of base aggregates queried to create the aggregate results. Max 50000 and Default 5000. Read more about how limit is used to calculate aggregate results in our article on Aggregate Data API Improvements.
+        :param params: Any additional query params
+        :param raw: Return raw object instead of results object
+        :return: List of aggregates
+        :rtype: List[Agg]
+        """
+        url = f"/v2/aggs/ticker/{ticker}/range/{multiplier}/{timespan}/{from_}/{to}"
+
+        return self._get(path=url, params=self._get_params(self.get_aggs, locals()), resultKey="results", deserializer=Agg.from_dict, raw=raw)
+
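An illustrative call against the new client follows. The ticker, date range, and limit are placeholder values, POLYGON_API_KEY is assumed to be set in the environment, and sort is passed as a plain string because the Sort enum's members are defined in polygon/rest/models.py, outside this excerpt:

    from polygon.rest import RESTClient

    client = RESTClient()  # reads POLYGON_API_KEY from the environment
    aggs = client.get_aggs(
        ticker="AAPL",
        multiplier=1,
        timespan="day",
        from_="2022-01-03",
        to="2022-01-07",
        adjusted=True,
        sort="asc",   # a plain str is accepted alongside the Sort enum
        limit=120,
    )
    for agg in aggs:
        print(agg)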

polygon/rest/base.py

Lines changed: 90 additions & 0 deletions

@@ -0,0 +1,90 @@
+import os
+import json
+import urllib3
+import inspect
+from enum import Enum
+from typing import Optional, Any
+
+base = 'https://api.polygon.io'
+env_key = "POLYGON_API_KEY"
+
+# https://urllib3.readthedocs.io/en/stable/reference/urllib3.poolmanager.html
+class BaseClient:
+    def __init__(
+        self,
+        api_key: Optional[str] = os.getenv(env_key),
+        connect_timeout: float = 10.0,
+        read_timeout: float = 10.0,
+        num_pools: int = 10,
+        retries = 3,
+        base: str = base
+    ):
+        if api_key is None:
+            raise Exception(f"Must specify env var {env_key} or pass api_key in constructor")
+        self.API_KEY = api_key
+        self.BASE = base
+
+        # https://urllib3.readthedocs.io/en/stable/reference/urllib3.connectionpool.html#urllib3.HTTPConnectionPool
+        self.client = urllib3.PoolManager(num_pools=num_pools, headers={
+            'Authorization': 'Bearer ' + self.API_KEY
+        })
+        self.timeout = urllib3.Timeout(connect=connect_timeout, read=read_timeout)
+        self.retries = retries
+
+    def _decode(self, resp):
+        return json.loads(resp.data.decode('utf-8'))
+
+    def _get(self, path: str, params: Optional[dict] = None, resultKey: Optional[str] = None, deserializer = None, raw: bool = False) -> Any:
+        if params is None:
+            params = {}
+        params = {str(k): str(v) for k, v in params.items() if v is not None}
+        resp = self.client.request('GET', self.BASE + path, fields=params, retries=self.retries)
+
+        if resp.status != 200:
+            raise Exception(resp.data.decode('utf-8'))
+
+        if raw:
+            return resp
+
+        obj = self._decode(resp)
+
+        if resultKey:
+            obj = obj[resultKey]
+
+        if deserializer:
+            obj = [deserializer(o) for o in obj]
+
+        return obj
+
+    def _get_params(self, fn, caller_locals):
+        params = caller_locals["params"]
+        if params is None:
+            params = {}
+        # https://docs.python.org/3.7/library/inspect.html#inspect.Signature
+        for argname, v in inspect.signature(fn).parameters.items():
+            # https://docs.python.org/3.7/library/inspect.html#inspect.Parameter
+            if argname in ['params', 'raw']:
+                continue
+            if v.default != v.empty:
+                # timestamp_lt -> timestamp.lt
+                val = caller_locals.get(argname, v.default)
+                if isinstance(val, Enum):
+                    val = val.value
+                if val is not None:
+                    params[argname.replace("_", ".")] = val
+
+        return params
+
+    def _paginate(self, path: str, params: dict, raw: bool, deserializer):
+        while True:
+            resp = self._get(path=path, params=params, deserializer=deserializer, raw=True)
+            if raw:
+                return resp
+            decoded = self._decode(resp)
+            for t in decoded["results"]:
+                yield deserializer(t)
+            if "next_url" in decoded:
+                path = decoded["next_url"].replace(self.BASE, '')
+                params = {}
+            else:
+                return
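The mapping done by _get_params above is the subtle part: defaulted keyword arguments are pulled out of the caller's locals() and turned into query parameters, with underscores rewritten to dots so a Python-legal name like timestamp_lt becomes the API filter timestamp.lt. A standalone, simplified sketch of that behaviour; list_trades here is a hypothetical wrapper used only to demonstrate the mapping, not the real TradesClient method:

    import inspect
    from enum import Enum

    def build_params(fn, caller_locals: dict) -> dict:
        # Simplified mirror of BaseClient._get_params: every defaulted keyword
        # argument becomes a query parameter, with "_" rewritten to ".".
        params = caller_locals.get("params") or {}
        for name, p in inspect.signature(fn).parameters.items():
            if name in ("params", "raw"):
                continue
            if p.default is not p.empty:          # only defaulted (query-style) args
                val = caller_locals.get(name, p.default)
                if isinstance(val, Enum):
                    val = val.value               # enums serialize to their value
                if val is not None:
                    params[name.replace("_", ".")] = val
        return params

    def list_trades(ticker: str, timestamp_gte=None, timestamp_lt=None, limit=None,
                    params=None, raw=False):
        # hypothetical endpoint wrapper, used only to show the mapping
        return build_params(list_trades, locals())

    print(list_trades("AAPL", timestamp_gte="2022-01-03", limit=10))
    # -> {'timestamp.gte': '2022-01-03', 'limit': 10}

_paginate builds on the same _get call: it yields deserialized results from each page and keeps following next_url (with the query parameters already baked into that URL) until the response no longer contains one.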
