Remove windows python distribution from repo and add requirements.txt
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -2,5 +2,8 @@ __pycache__/
|
||||
*$py.class
|
||||
debug/
|
||||
data/
|
||||
python/
|
||||
banned_addresses.txt
|
||||
settings.txt
|
||||
|
||||
*venv*
|
||||
|
||||
Binary file not shown.
Binary file not shown.
BIN
python/_bz2.pyd
BIN
python/_bz2.pyd
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
python/_lzma.pyd
BIN
python/_lzma.pyd
Binary file not shown.
BIN
python/_msi.pyd
BIN
python/_msi.pyd
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
python/_ssl.pyd
BIN
python/_ssl.pyd
Binary file not shown.
@@ -1,12 +0,0 @@
|
||||
from .atom import parse_atom_file, parse_atom_bytes
|
||||
from .rss import parse_rss_file, parse_rss_bytes
|
||||
from .json_feed import (
|
||||
parse_json_feed, parse_json_feed_file, parse_json_feed_bytes
|
||||
)
|
||||
from .opml import parse_opml_file, parse_opml_bytes
|
||||
from .exceptions import (
|
||||
FeedParseError, FeedDocumentError, FeedXMLError, FeedJSONError
|
||||
)
|
||||
from .const import VERSION
|
||||
|
||||
__version__ = VERSION
|
||||
@@ -1,284 +0,0 @@
|
||||
from datetime import datetime
|
||||
import enum
|
||||
from io import BytesIO
|
||||
from typing import Optional, List
|
||||
from xml.etree.ElementTree import Element
|
||||
|
||||
import attr
|
||||
|
||||
from .utils import (
|
||||
parse_xml, get_child, get_text, get_datetime, FeedParseError, ns
|
||||
)
|
||||
|
||||
|
||||
class AtomTextType(enum.Enum):
|
||||
text = "text"
|
||||
html = "html"
|
||||
xhtml = "xhtml"
|
||||
|
||||
|
||||
@attr.s
|
||||
class AtomTextConstruct:
|
||||
text_type: str = attr.ib()
|
||||
lang: Optional[str] = attr.ib()
|
||||
value: str = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class AtomEntry:
|
||||
title: AtomTextConstruct = attr.ib()
|
||||
id_: str = attr.ib()
|
||||
|
||||
# Should be mandatory but many feeds use published instead
|
||||
updated: Optional[datetime] = attr.ib()
|
||||
|
||||
authors: List['AtomPerson'] = attr.ib()
|
||||
contributors: List['AtomPerson'] = attr.ib()
|
||||
links: List['AtomLink'] = attr.ib()
|
||||
categories: List['AtomCategory'] = attr.ib()
|
||||
published: Optional[datetime] = attr.ib()
|
||||
rights: Optional[AtomTextConstruct] = attr.ib()
|
||||
summary: Optional[AtomTextConstruct] = attr.ib()
|
||||
content: Optional[AtomTextConstruct] = attr.ib()
|
||||
source: Optional['AtomFeed'] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class AtomFeed:
|
||||
title: Optional[AtomTextConstruct] = attr.ib()
|
||||
id_: str = attr.ib()
|
||||
|
||||
# Should be mandatory but many feeds do not include it
|
||||
updated: Optional[datetime] = attr.ib()
|
||||
|
||||
authors: List['AtomPerson'] = attr.ib()
|
||||
contributors: List['AtomPerson'] = attr.ib()
|
||||
links: List['AtomLink'] = attr.ib()
|
||||
categories: List['AtomCategory'] = attr.ib()
|
||||
generator: Optional['AtomGenerator'] = attr.ib()
|
||||
subtitle: Optional[AtomTextConstruct] = attr.ib()
|
||||
rights: Optional[AtomTextConstruct] = attr.ib()
|
||||
icon: Optional[str] = attr.ib()
|
||||
logo: Optional[str] = attr.ib()
|
||||
|
||||
entries: List[AtomEntry] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class AtomPerson:
|
||||
name: str = attr.ib()
|
||||
uri: Optional[str] = attr.ib()
|
||||
email: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class AtomLink:
|
||||
href: str = attr.ib()
|
||||
rel: Optional[str] = attr.ib()
|
||||
type_: Optional[str] = attr.ib()
|
||||
hreflang: Optional[str] = attr.ib()
|
||||
title: Optional[str] = attr.ib()
|
||||
length: Optional[int] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class AtomCategory:
|
||||
term: str = attr.ib()
|
||||
scheme: Optional[str] = attr.ib()
|
||||
label: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class AtomGenerator:
|
||||
name: str = attr.ib()
|
||||
uri: Optional[str] = attr.ib()
|
||||
version: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
def _get_generator(element: Element, name,
|
||||
optional: bool=True) -> Optional[AtomGenerator]:
|
||||
child = get_child(element, name, optional)
|
||||
if child is None:
|
||||
return None
|
||||
|
||||
return AtomGenerator(
|
||||
child.text.strip(),
|
||||
child.attrib.get('uri'),
|
||||
child.attrib.get('version'),
|
||||
)
|
||||
|
||||
|
||||
def _get_text_construct(element: Element, name,
|
||||
optional: bool=True) -> Optional[AtomTextConstruct]:
|
||||
child = get_child(element, name, optional)
|
||||
if child is None:
|
||||
return None
|
||||
|
||||
try:
|
||||
text_type = AtomTextType(child.attrib['type'])
|
||||
except KeyError:
|
||||
text_type = AtomTextType.text
|
||||
|
||||
try:
|
||||
lang = child.lang
|
||||
except AttributeError:
|
||||
lang = None
|
||||
|
||||
if child.text is None:
|
||||
if optional:
|
||||
return None
|
||||
|
||||
raise FeedParseError(
|
||||
'Could not parse atom feed: "{}" text is required but is empty'
|
||||
.format(name)
|
||||
)
|
||||
|
||||
return AtomTextConstruct(
|
||||
text_type,
|
||||
lang,
|
||||
child.text.strip()
|
||||
)
|
||||
|
||||
|
||||
def _get_person(element: Element) -> Optional[AtomPerson]:
|
||||
try:
|
||||
return AtomPerson(
|
||||
get_text(element, 'feed:name', optional=False),
|
||||
get_text(element, 'feed:uri'),
|
||||
get_text(element, 'feed:email')
|
||||
)
|
||||
except FeedParseError:
|
||||
return None
|
||||
|
||||
|
||||
def _get_link(element: Element) -> AtomLink:
|
||||
length = element.attrib.get('length')
|
||||
length = int(length) if length else None
|
||||
return AtomLink(
|
||||
element.attrib['href'],
|
||||
element.attrib.get('rel'),
|
||||
element.attrib.get('type'),
|
||||
element.attrib.get('hreflang'),
|
||||
element.attrib.get('title'),
|
||||
length
|
||||
)
|
||||
|
||||
|
||||
def _get_category(element: Element) -> AtomCategory:
|
||||
return AtomCategory(
|
||||
element.attrib['term'],
|
||||
element.attrib.get('scheme'),
|
||||
element.attrib.get('label'),
|
||||
)
|
||||
|
||||
|
||||
def _get_entry(element: Element,
|
||||
default_authors: List[AtomPerson]) -> AtomEntry:
|
||||
root = element
|
||||
|
||||
# Mandatory
|
||||
title = _get_text_construct(root, 'feed:title')
|
||||
id_ = get_text(root, 'feed:id')
|
||||
|
||||
# Optional
|
||||
try:
|
||||
source = _parse_atom(get_child(root, 'feed:source', optional=False),
|
||||
parse_entries=False)
|
||||
except FeedParseError:
|
||||
source = None
|
||||
source_authors = []
|
||||
else:
|
||||
source_authors = source.authors
|
||||
|
||||
authors = [_get_person(e)
|
||||
for e in root.findall('feed:author', ns)] or default_authors
|
||||
authors = [a for a in authors if a is not None]
|
||||
authors = authors or default_authors or source_authors
|
||||
|
||||
contributors = [_get_person(e)
|
||||
for e in root.findall('feed:contributor', ns) if e]
|
||||
contributors = [c for c in contributors if c is not None]
|
||||
|
||||
links = [_get_link(e) for e in root.findall('feed:link', ns)]
|
||||
categories = [_get_category(e) for e in root.findall('feed:category', ns)]
|
||||
|
||||
updated = get_datetime(root, 'feed:updated')
|
||||
published = get_datetime(root, 'feed:published')
|
||||
rights = _get_text_construct(root, 'feed:rights')
|
||||
summary = _get_text_construct(root, 'feed:summary')
|
||||
content = _get_text_construct(root, 'feed:content')
|
||||
|
||||
return AtomEntry(
|
||||
title,
|
||||
id_,
|
||||
updated,
|
||||
authors,
|
||||
contributors,
|
||||
links,
|
||||
categories,
|
||||
published,
|
||||
rights,
|
||||
summary,
|
||||
content,
|
||||
source
|
||||
)
|
||||
|
||||
|
||||
def _parse_atom(root: Element, parse_entries: bool=True) -> AtomFeed:
|
||||
# Mandatory
|
||||
id_ = get_text(root, 'feed:id', optional=False)
|
||||
|
||||
# Optional
|
||||
title = _get_text_construct(root, 'feed:title')
|
||||
updated = get_datetime(root, 'feed:updated')
|
||||
authors = [_get_person(e)
|
||||
for e in root.findall('feed:author', ns) if e]
|
||||
authors = [a for a in authors if a is not None]
|
||||
contributors = [_get_person(e)
|
||||
for e in root.findall('feed:contributor', ns) if e]
|
||||
contributors = [c for c in contributors if c is not None]
|
||||
links = [_get_link(e)
|
||||
for e in root.findall('feed:link', ns)]
|
||||
categories = [_get_category(e)
|
||||
for e in root.findall('feed:category', ns)]
|
||||
|
||||
generator = _get_generator(root, 'feed:generator')
|
||||
subtitle = _get_text_construct(root, 'feed:subtitle')
|
||||
rights = _get_text_construct(root, 'feed:rights')
|
||||
icon = get_text(root, 'feed:icon')
|
||||
logo = get_text(root, 'feed:logo')
|
||||
|
||||
if parse_entries:
|
||||
entries = [_get_entry(e, authors)
|
||||
for e in root.findall('feed:entry', ns)]
|
||||
else:
|
||||
entries = []
|
||||
|
||||
atom_feed = AtomFeed(
|
||||
title,
|
||||
id_,
|
||||
updated,
|
||||
authors,
|
||||
contributors,
|
||||
links,
|
||||
categories,
|
||||
generator,
|
||||
subtitle,
|
||||
rights,
|
||||
icon,
|
||||
logo,
|
||||
entries
|
||||
)
|
||||
return atom_feed
|
||||
|
||||
|
||||
def parse_atom_file(filename: str) -> AtomFeed:
|
||||
"""Parse an Atom feed from a local XML file."""
|
||||
root = parse_xml(filename).getroot()
|
||||
return _parse_atom(root)
|
||||
|
||||
|
||||
def parse_atom_bytes(data: bytes) -> AtomFeed:
|
||||
"""Parse an Atom feed from a byte-string containing XML data."""
|
||||
root = parse_xml(BytesIO(data)).getroot()
|
||||
return _parse_atom(root)
|
||||
@@ -1 +0,0 @@
|
||||
VERSION = '0.0.13'
|
||||
@@ -1,14 +0,0 @@
|
||||
class FeedParseError(Exception):
|
||||
"""Document is an invalid feed."""
|
||||
|
||||
|
||||
class FeedDocumentError(Exception):
|
||||
"""Document is not a supported file."""
|
||||
|
||||
|
||||
class FeedXMLError(FeedDocumentError):
|
||||
"""Document is not valid XML."""
|
||||
|
||||
|
||||
class FeedJSONError(FeedDocumentError):
|
||||
"""Document is not valid JSON."""
|
||||
@@ -1,223 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
import json
|
||||
from typing import Optional, List
|
||||
|
||||
import attr
|
||||
|
||||
from .exceptions import FeedParseError, FeedJSONError
|
||||
from .utils import try_parse_date
|
||||
|
||||
|
||||
@attr.s
|
||||
class JSONFeedAuthor:
|
||||
|
||||
name: Optional[str] = attr.ib()
|
||||
url: Optional[str] = attr.ib()
|
||||
avatar: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class JSONFeedAttachment:
|
||||
|
||||
url: str = attr.ib()
|
||||
mime_type: str = attr.ib()
|
||||
title: Optional[str] = attr.ib()
|
||||
size_in_bytes: Optional[int] = attr.ib()
|
||||
duration: Optional[timedelta] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class JSONFeedItem:
|
||||
|
||||
id_: str = attr.ib()
|
||||
url: Optional[str] = attr.ib()
|
||||
external_url: Optional[str] = attr.ib()
|
||||
title: Optional[str] = attr.ib()
|
||||
content_html: Optional[str] = attr.ib()
|
||||
content_text: Optional[str] = attr.ib()
|
||||
summary: Optional[str] = attr.ib()
|
||||
image: Optional[str] = attr.ib()
|
||||
banner_image: Optional[str] = attr.ib()
|
||||
date_published: Optional[datetime] = attr.ib()
|
||||
date_modified: Optional[datetime] = attr.ib()
|
||||
author: Optional[JSONFeedAuthor] = attr.ib()
|
||||
|
||||
tags: List[str] = attr.ib()
|
||||
attachments: List[JSONFeedAttachment] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class JSONFeed:
|
||||
|
||||
version: str = attr.ib()
|
||||
title: str = attr.ib()
|
||||
home_page_url: Optional[str] = attr.ib()
|
||||
feed_url: Optional[str] = attr.ib()
|
||||
description: Optional[str] = attr.ib()
|
||||
user_comment: Optional[str] = attr.ib()
|
||||
next_url: Optional[str] = attr.ib()
|
||||
icon: Optional[str] = attr.ib()
|
||||
favicon: Optional[str] = attr.ib()
|
||||
author: Optional[JSONFeedAuthor] = attr.ib()
|
||||
expired: bool = attr.ib()
|
||||
|
||||
items: List[JSONFeedItem] = attr.ib()
|
||||
|
||||
|
||||
def _get_items(root: dict) -> List[JSONFeedItem]:
|
||||
rv = []
|
||||
items = root.get('items', [])
|
||||
if not items:
|
||||
return rv
|
||||
|
||||
for item in items:
|
||||
rv.append(_get_item(item))
|
||||
|
||||
return rv
|
||||
|
||||
|
||||
def _get_item(item_dict: dict) -> JSONFeedItem:
|
||||
return JSONFeedItem(
|
||||
id_=_get_text(item_dict, 'id', optional=False),
|
||||
url=_get_text(item_dict, 'url'),
|
||||
external_url=_get_text(item_dict, 'external_url'),
|
||||
title=_get_text(item_dict, 'title'),
|
||||
content_html=_get_text(item_dict, 'content_html'),
|
||||
content_text=_get_text(item_dict, 'content_text'),
|
||||
summary=_get_text(item_dict, 'summary'),
|
||||
image=_get_text(item_dict, 'image'),
|
||||
banner_image=_get_text(item_dict, 'banner_image'),
|
||||
date_published=_get_datetime(item_dict, 'date_published'),
|
||||
date_modified=_get_datetime(item_dict, 'date_modified'),
|
||||
author=_get_author(item_dict),
|
||||
tags=_get_tags(item_dict, 'tags'),
|
||||
attachments=_get_attachments(item_dict, 'attachments')
|
||||
)
|
||||
|
||||
|
||||
def _get_attachments(root, name) -> List[JSONFeedAttachment]:
|
||||
rv = list()
|
||||
for attachment_dict in root.get(name, []):
|
||||
rv.append(JSONFeedAttachment(
|
||||
_get_text(attachment_dict, 'url', optional=False),
|
||||
_get_text(attachment_dict, 'mime_type', optional=False),
|
||||
_get_text(attachment_dict, 'title'),
|
||||
_get_int(attachment_dict, 'size_in_bytes'),
|
||||
_get_duration(attachment_dict, 'duration_in_seconds')
|
||||
))
|
||||
return rv
|
||||
|
||||
|
||||
def _get_tags(root, name) -> List[str]:
|
||||
tags = root.get(name, [])
|
||||
return [tag for tag in tags if isinstance(tag, str)]
|
||||
|
||||
|
||||
def _get_datetime(root: dict, name, optional: bool=True) -> Optional[datetime]:
|
||||
text = _get_text(root, name, optional)
|
||||
if text is None:
|
||||
return None
|
||||
|
||||
return try_parse_date(text)
|
||||
|
||||
|
||||
def _get_expired(root: dict) -> bool:
|
||||
if root.get('expired') is True:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _get_author(root: dict) -> Optional[JSONFeedAuthor]:
|
||||
author_dict = root.get('author')
|
||||
if not author_dict:
|
||||
return None
|
||||
|
||||
rv = JSONFeedAuthor(
|
||||
name=_get_text(author_dict, 'name'),
|
||||
url=_get_text(author_dict, 'url'),
|
||||
avatar=_get_text(author_dict, 'avatar'),
|
||||
)
|
||||
if rv.name is None and rv.url is None and rv.avatar is None:
|
||||
return None
|
||||
|
||||
return rv
|
||||
|
||||
|
||||
def _get_int(root: dict, name: str, optional: bool=True) -> Optional[int]:
|
||||
rv = root.get(name)
|
||||
if not optional and rv is None:
|
||||
raise FeedParseError('Could not parse feed: "{}" int is required but '
|
||||
'is empty'.format(name))
|
||||
|
||||
if optional and rv is None:
|
||||
return None
|
||||
|
||||
if not isinstance(rv, int):
|
||||
raise FeedParseError('Could not parse feed: "{}" is not an int'
|
||||
.format(name))
|
||||
|
||||
return rv
|
||||
|
||||
|
||||
def _get_duration(root: dict, name: str,
|
||||
optional: bool=True) -> Optional[timedelta]:
|
||||
duration = _get_int(root, name, optional)
|
||||
if duration is None:
|
||||
return None
|
||||
|
||||
return timedelta(seconds=duration)
|
||||
|
||||
|
||||
def _get_text(root: dict, name: str, optional: bool=True) -> Optional[str]:
|
||||
rv = root.get(name)
|
||||
if not optional and rv is None:
|
||||
raise FeedParseError('Could not parse feed: "{}" text is required but '
|
||||
'is empty'.format(name))
|
||||
|
||||
if optional and rv is None:
|
||||
return None
|
||||
|
||||
if not isinstance(rv, str):
|
||||
raise FeedParseError('Could not parse feed: "{}" is not a string'
|
||||
.format(name))
|
||||
|
||||
return rv
|
||||
|
||||
|
||||
def parse_json_feed(root: dict) -> JSONFeed:
|
||||
return JSONFeed(
|
||||
version=_get_text(root, 'version', optional=False),
|
||||
title=_get_text(root, 'title', optional=False),
|
||||
home_page_url=_get_text(root, 'home_page_url'),
|
||||
feed_url=_get_text(root, 'feed_url'),
|
||||
description=_get_text(root, 'description'),
|
||||
user_comment=_get_text(root, 'user_comment'),
|
||||
next_url=_get_text(root, 'next_url'),
|
||||
icon=_get_text(root, 'icon'),
|
||||
favicon=_get_text(root, 'favicon'),
|
||||
author=_get_author(root),
|
||||
expired=_get_expired(root),
|
||||
items=_get_items(root)
|
||||
)
|
||||
|
||||
|
||||
def parse_json_feed_file(filename: str) -> JSONFeed:
|
||||
"""Parse a JSON feed from a local json file."""
|
||||
with open(filename) as f:
|
||||
try:
|
||||
root = json.load(f)
|
||||
except json.decoder.JSONDecodeError:
|
||||
raise FeedJSONError('Not a valid JSON document')
|
||||
|
||||
return parse_json_feed(root)
|
||||
|
||||
|
||||
def parse_json_feed_bytes(data: bytes) -> JSONFeed:
|
||||
"""Parse a JSON feed from a byte-string containing JSON data."""
|
||||
try:
|
||||
root = json.loads(data)
|
||||
except json.decoder.JSONDecodeError:
|
||||
raise FeedJSONError('Not a valid JSON document')
|
||||
|
||||
return parse_json_feed(root)
|
||||
@@ -1,107 +0,0 @@
|
||||
from datetime import datetime
|
||||
from io import BytesIO
|
||||
from typing import Optional, List
|
||||
from xml.etree.ElementTree import Element
|
||||
|
||||
import attr
|
||||
|
||||
from .utils import parse_xml, get_text, get_int, get_datetime
|
||||
|
||||
|
||||
@attr.s
|
||||
class OPMLOutline:
|
||||
text: Optional[str] = attr.ib()
|
||||
type: Optional[str] = attr.ib()
|
||||
xml_url: Optional[str] = attr.ib()
|
||||
description: Optional[str] = attr.ib()
|
||||
html_url: Optional[str] = attr.ib()
|
||||
language: Optional[str] = attr.ib()
|
||||
title: Optional[str] = attr.ib()
|
||||
version: Optional[str] = attr.ib()
|
||||
|
||||
outlines: List['OPMLOutline'] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class OPML:
|
||||
title: Optional[str] = attr.ib()
|
||||
owner_name: Optional[str] = attr.ib()
|
||||
owner_email: Optional[str] = attr.ib()
|
||||
date_created: Optional[datetime] = attr.ib()
|
||||
date_modified: Optional[datetime] = attr.ib()
|
||||
expansion_state: Optional[str] = attr.ib()
|
||||
|
||||
vertical_scroll_state: Optional[int] = attr.ib()
|
||||
window_top: Optional[int] = attr.ib()
|
||||
window_left: Optional[int] = attr.ib()
|
||||
window_bottom: Optional[int] = attr.ib()
|
||||
window_right: Optional[int] = attr.ib()
|
||||
|
||||
outlines: List[OPMLOutline] = attr.ib()
|
||||
|
||||
|
||||
def _get_outlines(element: Element) -> List[OPMLOutline]:
|
||||
rv = list()
|
||||
|
||||
for outline in element.findall('outline'):
|
||||
rv.append(OPMLOutline(
|
||||
outline.attrib.get('text'),
|
||||
outline.attrib.get('type'),
|
||||
outline.attrib.get('xmlUrl'),
|
||||
outline.attrib.get('description'),
|
||||
outline.attrib.get('htmlUrl'),
|
||||
outline.attrib.get('language'),
|
||||
outline.attrib.get('title'),
|
||||
outline.attrib.get('version'),
|
||||
_get_outlines(outline)
|
||||
))
|
||||
|
||||
return rv
|
||||
|
||||
|
||||
def _parse_opml(root: Element) -> OPML:
|
||||
head = root.find('head')
|
||||
body = root.find('body')
|
||||
|
||||
return OPML(
|
||||
get_text(head, 'title'),
|
||||
get_text(head, 'ownerName'),
|
||||
get_text(head, 'ownerEmail'),
|
||||
get_datetime(head, 'dateCreated'),
|
||||
get_datetime(head, 'dateModified'),
|
||||
get_text(head, 'expansionState'),
|
||||
get_int(head, 'vertScrollState'),
|
||||
get_int(head, 'windowTop'),
|
||||
get_int(head, 'windowLeft'),
|
||||
get_int(head, 'windowBottom'),
|
||||
get_int(head, 'windowRight'),
|
||||
outlines=_get_outlines(body)
|
||||
)
|
||||
|
||||
|
||||
def parse_opml_file(filename: str) -> OPML:
|
||||
"""Parse an OPML document from a local XML file."""
|
||||
root = parse_xml(filename).getroot()
|
||||
return _parse_opml(root)
|
||||
|
||||
|
||||
def parse_opml_bytes(data: bytes) -> OPML:
|
||||
"""Parse an OPML document from a byte-string containing XML data."""
|
||||
root = parse_xml(BytesIO(data)).getroot()
|
||||
return _parse_opml(root)
|
||||
|
||||
|
||||
def get_feed_list(opml_obj: OPML) -> List[str]:
|
||||
"""Walk an OPML document to extract the list of feed it contains."""
|
||||
rv = list()
|
||||
|
||||
def collect(obj):
|
||||
for outline in obj.outlines:
|
||||
if outline.type == 'rss' and outline.xml_url:
|
||||
rv.append(outline.xml_url)
|
||||
|
||||
if outline.outlines:
|
||||
collect(outline)
|
||||
|
||||
collect(opml_obj)
|
||||
return rv
|
||||
@@ -1,221 +0,0 @@
|
||||
from datetime import datetime
|
||||
from io import BytesIO
|
||||
from typing import Optional, List
|
||||
from xml.etree.ElementTree import Element
|
||||
|
||||
import attr
|
||||
|
||||
from .utils import (
|
||||
parse_xml, get_child, get_text, get_int, get_datetime, FeedParseError
|
||||
)
|
||||
|
||||
|
||||
@attr.s
|
||||
class RSSImage:
|
||||
url: str = attr.ib()
|
||||
title: Optional[str] = attr.ib()
|
||||
link: str = attr.ib()
|
||||
width: int = attr.ib()
|
||||
height: int = attr.ib()
|
||||
description: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class RSSEnclosure:
|
||||
url: str = attr.ib()
|
||||
length: Optional[int] = attr.ib()
|
||||
type: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class RSSSource:
|
||||
title: str = attr.ib()
|
||||
url: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class RSSItem:
|
||||
title: Optional[str] = attr.ib()
|
||||
link: Optional[str] = attr.ib()
|
||||
description: Optional[str] = attr.ib()
|
||||
author: Optional[str] = attr.ib()
|
||||
categories: List[str] = attr.ib()
|
||||
comments: Optional[str] = attr.ib()
|
||||
enclosures: List[RSSEnclosure] = attr.ib()
|
||||
guid: Optional[str] = attr.ib()
|
||||
pub_date: Optional[datetime] = attr.ib()
|
||||
source: Optional[RSSSource] = attr.ib()
|
||||
|
||||
# Extension
|
||||
content_encoded: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class RSSChannel:
|
||||
title: Optional[str] = attr.ib()
|
||||
link: Optional[str] = attr.ib()
|
||||
description: Optional[str] = attr.ib()
|
||||
language: Optional[str] = attr.ib()
|
||||
copyright: Optional[str] = attr.ib()
|
||||
managing_editor: Optional[str] = attr.ib()
|
||||
web_master: Optional[str] = attr.ib()
|
||||
pub_date: Optional[datetime] = attr.ib()
|
||||
last_build_date: Optional[datetime] = attr.ib()
|
||||
categories: List[str] = attr.ib()
|
||||
generator: Optional[str] = attr.ib()
|
||||
docs: Optional[str] = attr.ib()
|
||||
ttl: Optional[int] = attr.ib()
|
||||
image: Optional[RSSImage] = attr.ib()
|
||||
|
||||
items: List[RSSItem] = attr.ib()
|
||||
|
||||
# Extension
|
||||
content_encoded: Optional[str] = attr.ib()
|
||||
|
||||
|
||||
def _get_image(element: Element, name,
|
||||
optional: bool=True) -> Optional[RSSImage]:
|
||||
child = get_child(element, name, optional)
|
||||
if child is None:
|
||||
return None
|
||||
|
||||
return RSSImage(
|
||||
get_text(child, 'url', optional=False),
|
||||
get_text(child, 'title'),
|
||||
get_text(child, 'link', optional=False),
|
||||
get_int(child, 'width') or 88,
|
||||
get_int(child, 'height') or 31,
|
||||
get_text(child, 'description')
|
||||
)
|
||||
|
||||
|
||||
def _get_source(element: Element, name,
|
||||
optional: bool=True) -> Optional[RSSSource]:
|
||||
child = get_child(element, name, optional)
|
||||
if child is None:
|
||||
return None
|
||||
|
||||
return RSSSource(
|
||||
child.text.strip(),
|
||||
child.attrib.get('url'),
|
||||
)
|
||||
|
||||
|
||||
def _get_enclosure(element: Element) -> RSSEnclosure:
|
||||
length = element.attrib.get('length')
|
||||
try:
|
||||
length = int(length)
|
||||
except (TypeError, ValueError):
|
||||
length = None
|
||||
|
||||
return RSSEnclosure(
|
||||
element.attrib['url'],
|
||||
length,
|
||||
element.attrib.get('type'),
|
||||
)
|
||||
|
||||
|
||||
def _get_link(element: Element) -> Optional[str]:
|
||||
"""Attempt to retrieve item link.
|
||||
|
||||
Use the GUID as a fallback if it is a permalink.
|
||||
"""
|
||||
link = get_text(element, 'link')
|
||||
if link is not None:
|
||||
return link
|
||||
|
||||
guid = get_child(element, 'guid')
|
||||
if guid is not None and guid.attrib.get('isPermaLink') == 'true':
|
||||
return get_text(element, 'guid')
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _get_item(element: Element) -> RSSItem:
|
||||
root = element
|
||||
|
||||
title = get_text(root, 'title')
|
||||
link = _get_link(root)
|
||||
description = get_text(root, 'description')
|
||||
author = get_text(root, 'author')
|
||||
categories = [e.text for e in root.findall('category')]
|
||||
comments = get_text(root, 'comments')
|
||||
enclosure = [_get_enclosure(e) for e in root.findall('enclosure')]
|
||||
guid = get_text(root, 'guid')
|
||||
pub_date = get_datetime(root, 'pubDate')
|
||||
source = _get_source(root, 'source')
|
||||
|
||||
content_encoded = get_text(root, 'content:encoded')
|
||||
|
||||
return RSSItem(
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
author,
|
||||
categories,
|
||||
comments,
|
||||
enclosure,
|
||||
guid,
|
||||
pub_date,
|
||||
source,
|
||||
content_encoded
|
||||
)
|
||||
|
||||
|
||||
def _parse_rss(root: Element) -> RSSChannel:
|
||||
rss_version = root.get('version')
|
||||
if rss_version != '2.0':
|
||||
raise FeedParseError('Cannot process RSS feed version "{}"'
|
||||
.format(rss_version))
|
||||
|
||||
root = root.find('channel')
|
||||
|
||||
title = get_text(root, 'title')
|
||||
link = get_text(root, 'link')
|
||||
description = get_text(root, 'description')
|
||||
language = get_text(root, 'language')
|
||||
copyright = get_text(root, 'copyright')
|
||||
managing_editor = get_text(root, 'managingEditor')
|
||||
web_master = get_text(root, 'webMaster')
|
||||
pub_date = get_datetime(root, 'pubDate')
|
||||
last_build_date = get_datetime(root, 'lastBuildDate')
|
||||
categories = [e.text for e in root.findall('category')]
|
||||
generator = get_text(root, 'generator')
|
||||
docs = get_text(root, 'docs')
|
||||
ttl = get_int(root, 'ttl')
|
||||
|
||||
image = _get_image(root, 'image')
|
||||
items = [_get_item(e) for e in root.findall('item')]
|
||||
|
||||
content_encoded = get_text(root, 'content:encoded')
|
||||
|
||||
return RSSChannel(
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
language,
|
||||
copyright,
|
||||
managing_editor,
|
||||
web_master,
|
||||
pub_date,
|
||||
last_build_date,
|
||||
categories,
|
||||
generator,
|
||||
docs,
|
||||
ttl,
|
||||
image,
|
||||
items,
|
||||
content_encoded
|
||||
)
|
||||
|
||||
|
||||
def parse_rss_file(filename: str) -> RSSChannel:
|
||||
"""Parse an RSS feed from a local XML file."""
|
||||
root = parse_xml(filename).getroot()
|
||||
return _parse_rss(root)
|
||||
|
||||
|
||||
def parse_rss_bytes(data: bytes) -> RSSChannel:
|
||||
"""Parse an RSS feed from a byte-string containing XML data."""
|
||||
root = parse_xml(BytesIO(data)).getroot()
|
||||
return _parse_rss(root)
|
||||
@@ -1,224 +0,0 @@
|
||||
"""Simple API that abstracts away the differences between feed types."""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
import html
|
||||
import os
|
||||
from typing import Optional, List, Tuple
|
||||
import urllib.parse
|
||||
|
||||
import attr
|
||||
|
||||
from . import atom, rss, json_feed
|
||||
from .exceptions import (
|
||||
FeedParseError, FeedDocumentError, FeedXMLError, FeedJSONError
|
||||
)
|
||||
|
||||
|
||||
@attr.s
|
||||
class Attachment:
|
||||
link: str = attr.ib()
|
||||
mime_type: Optional[str] = attr.ib()
|
||||
title: Optional[str] = attr.ib()
|
||||
size_in_bytes: Optional[int] = attr.ib()
|
||||
duration: Optional[timedelta] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class Article:
|
||||
id: str = attr.ib()
|
||||
title: Optional[str] = attr.ib()
|
||||
link: Optional[str] = attr.ib()
|
||||
content: str = attr.ib()
|
||||
published_at: Optional[datetime] = attr.ib()
|
||||
updated_at: Optional[datetime] = attr.ib()
|
||||
attachments: List[Attachment] = attr.ib()
|
||||
|
||||
|
||||
@attr.s
|
||||
class Feed:
|
||||
title: str = attr.ib()
|
||||
subtitle: Optional[str] = attr.ib()
|
||||
link: Optional[str] = attr.ib()
|
||||
updated_at: Optional[datetime] = attr.ib()
|
||||
articles: List[Article] = attr.ib()
|
||||
|
||||
|
||||
def _adapt_atom_feed(atom_feed: atom.AtomFeed) -> Feed:
|
||||
articles = list()
|
||||
for entry in atom_feed.entries:
|
||||
if entry.content is not None:
|
||||
content = entry.content.value
|
||||
elif entry.summary is not None:
|
||||
content = entry.summary.value
|
||||
else:
|
||||
content = ''
|
||||
published_at, updated_at = _get_article_dates(entry.published,
|
||||
entry.updated)
|
||||
# Find article link and attachments
|
||||
article_link = None
|
||||
attachments = list()
|
||||
for candidate_link in entry.links:
|
||||
if candidate_link.rel in ('alternate', None):
|
||||
article_link = candidate_link.href
|
||||
elif candidate_link.rel == 'enclosure':
|
||||
attachments.append(Attachment(
|
||||
title=_get_attachment_title(candidate_link.title,
|
||||
candidate_link.href),
|
||||
link=candidate_link.href,
|
||||
mime_type=candidate_link.type_,
|
||||
size_in_bytes=candidate_link.length,
|
||||
duration=None
|
||||
))
|
||||
|
||||
if entry.title is None:
|
||||
entry_title = None
|
||||
elif entry.title.text_type in (atom.AtomTextType.html,
|
||||
atom.AtomTextType.xhtml):
|
||||
entry_title = html.unescape(entry.title.value).strip()
|
||||
else:
|
||||
entry_title = entry.title.value
|
||||
|
||||
articles.append(Article(
|
||||
entry.id_,
|
||||
entry_title,
|
||||
article_link,
|
||||
content,
|
||||
published_at,
|
||||
updated_at,
|
||||
attachments
|
||||
))
|
||||
|
||||
# Find feed link
|
||||
link = None
|
||||
for candidate_link in atom_feed.links:
|
||||
if candidate_link.rel == 'self':
|
||||
link = candidate_link.href
|
||||
break
|
||||
|
||||
return Feed(
|
||||
atom_feed.title.value if atom_feed.title else atom_feed.id_,
|
||||
atom_feed.subtitle.value if atom_feed.subtitle else None,
|
||||
link,
|
||||
atom_feed.updated,
|
||||
articles
|
||||
)
|
||||
|
||||
|
||||
def _adapt_rss_channel(rss_channel: rss.RSSChannel) -> Feed:
|
||||
articles = list()
|
||||
for item in rss_channel.items:
|
||||
attachments = [
|
||||
Attachment(link=e.url, mime_type=e.type, size_in_bytes=e.length,
|
||||
title=_get_attachment_title(None, e.url), duration=None)
|
||||
for e in item.enclosures
|
||||
]
|
||||
articles.append(Article(
|
||||
item.guid or item.link,
|
||||
item.title,
|
||||
item.link,
|
||||
item.content_encoded or item.description or '',
|
||||
item.pub_date,
|
||||
None,
|
||||
attachments
|
||||
))
|
||||
|
||||
if rss_channel.title is None and rss_channel.link is None:
|
||||
raise FeedParseError('RSS feed does not have a title nor a link')
|
||||
|
||||
return Feed(
|
||||
rss_channel.title if rss_channel.title else rss_channel.link,
|
||||
rss_channel.description,
|
||||
rss_channel.link,
|
||||
rss_channel.pub_date,
|
||||
articles
|
||||
)
|
||||
|
||||
|
||||
def _adapt_json_feed(json_feed: json_feed.JSONFeed) -> Feed:
|
||||
articles = list()
|
||||
for item in json_feed.items:
|
||||
attachments = [
|
||||
Attachment(a.url, a.mime_type,
|
||||
_get_attachment_title(a.title, a.url),
|
||||
a.size_in_bytes, a.duration)
|
||||
for a in item.attachments
|
||||
]
|
||||
articles.append(Article(
|
||||
item.id_,
|
||||
item.title,
|
||||
item.url,
|
||||
item.content_html or item.content_text or '',
|
||||
item.date_published,
|
||||
item.date_modified,
|
||||
attachments
|
||||
))
|
||||
|
||||
return Feed(
|
||||
json_feed.title,
|
||||
json_feed.description,
|
||||
json_feed.feed_url,
|
||||
None,
|
||||
articles
|
||||
)
|
||||
|
||||
|
||||
def _get_article_dates(published_at: Optional[datetime],
|
||||
updated_at: Optional[datetime]
|
||||
) -> Tuple[Optional[datetime], Optional[datetime]]:
|
||||
if published_at and updated_at:
|
||||
return published_at, updated_at
|
||||
|
||||
if updated_at:
|
||||
return updated_at, None
|
||||
|
||||
if published_at:
|
||||
return published_at, None
|
||||
|
||||
raise FeedParseError('Article does not have proper dates')
|
||||
|
||||
|
||||
def _get_attachment_title(attachment_title: Optional[str], link: str) -> str:
|
||||
if attachment_title:
|
||||
return attachment_title
|
||||
|
||||
parsed_link = urllib.parse.urlparse(link)
|
||||
return os.path.basename(parsed_link.path)
|
||||
|
||||
|
||||
def _simple_parse(pairs, content) -> Feed:
|
||||
is_xml = True
|
||||
is_json = True
|
||||
for parser, adapter in pairs:
|
||||
try:
|
||||
return adapter(parser(content))
|
||||
except FeedXMLError:
|
||||
is_xml = False
|
||||
except FeedJSONError:
|
||||
is_json = False
|
||||
except FeedParseError:
|
||||
continue
|
||||
|
||||
if not is_xml and not is_json:
|
||||
raise FeedDocumentError('File is not a supported feed type')
|
||||
|
||||
raise FeedParseError('File is not a valid supported feed')
|
||||
|
||||
|
||||
def simple_parse_file(filename: str) -> Feed:
|
||||
"""Parse an Atom, RSS or JSON feed from a local file."""
|
||||
pairs = (
|
||||
(rss.parse_rss_file, _adapt_rss_channel),
|
||||
(atom.parse_atom_file, _adapt_atom_feed),
|
||||
(json_feed.parse_json_feed_file, _adapt_json_feed)
|
||||
)
|
||||
return _simple_parse(pairs, filename)
|
||||
|
||||
|
||||
def simple_parse_bytes(data: bytes) -> Feed:
|
||||
"""Parse an Atom, RSS or JSON feed from a byte-string containing data."""
|
||||
pairs = (
|
||||
(rss.parse_rss_bytes, _adapt_rss_channel),
|
||||
(atom.parse_atom_bytes, _adapt_atom_feed),
|
||||
(json_feed.parse_json_feed_bytes, _adapt_json_feed)
|
||||
)
|
||||
return _simple_parse(pairs, data)
|
||||
@@ -1,84 +0,0 @@
|
||||
from datetime import datetime, timezone
|
||||
from xml.etree.ElementTree import Element
|
||||
from typing import Optional
|
||||
|
||||
import dateutil.parser
|
||||
from defusedxml.ElementTree import parse as defused_xml_parse, ParseError
|
||||
|
||||
from .exceptions import FeedXMLError, FeedParseError
|
||||
|
||||
ns = {
|
||||
'content': 'http://purl.org/rss/1.0/modules/content/',
|
||||
'feed': 'http://www.w3.org/2005/Atom'
|
||||
}
|
||||
|
||||
|
||||
def parse_xml(xml_content):
|
||||
try:
|
||||
return defused_xml_parse(xml_content)
|
||||
except ParseError:
|
||||
raise FeedXMLError('Not a valid XML document')
|
||||
|
||||
|
||||
def get_child(element: Element, name,
|
||||
optional: bool=True) -> Optional[Element]:
|
||||
child = element.find(name, namespaces=ns)
|
||||
|
||||
if child is None and not optional:
|
||||
raise FeedParseError(
|
||||
'Could not parse feed: "{}" does not have a "{}"'
|
||||
.format(element.tag, name)
|
||||
)
|
||||
|
||||
elif child is None:
|
||||
return None
|
||||
|
||||
return child
|
||||
|
||||
|
||||
def get_text(element: Element, name, optional: bool=True) -> Optional[str]:
|
||||
child = get_child(element, name, optional)
|
||||
if child is None:
|
||||
return None
|
||||
|
||||
if child.text is None:
|
||||
if optional:
|
||||
return None
|
||||
|
||||
raise FeedParseError(
|
||||
'Could not parse feed: "{}" text is required but is empty'
|
||||
.format(name)
|
||||
)
|
||||
|
||||
return child.text.strip()
|
||||
|
||||
|
||||
def get_int(element: Element, name, optional: bool=True) -> Optional[int]:
|
||||
text = get_text(element, name, optional)
|
||||
if text is None:
|
||||
return None
|
||||
|
||||
return int(text)
|
||||
|
||||
|
||||
def get_datetime(element: Element, name,
|
||||
optional: bool=True) -> Optional[datetime]:
|
||||
text = get_text(element, name, optional)
|
||||
if text is None:
|
||||
return None
|
||||
|
||||
return try_parse_date(text)
|
||||
|
||||
|
||||
def try_parse_date(date_str: str) -> Optional[datetime]:
|
||||
try:
|
||||
date = dateutil.parser.parse(date_str, fuzzy=True)
|
||||
except (ValueError, OverflowError):
|
||||
return None
|
||||
|
||||
if date.tzinfo is None:
|
||||
# TZ naive datetime, make it a TZ aware datetime by assuming it
|
||||
# contains UTC time
|
||||
date = date.replace(tzinfo=timezone.utc)
|
||||
|
||||
return date
|
||||
@@ -1,65 +0,0 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from functools import partial
|
||||
|
||||
from . import converters, exceptions, filters, validators
|
||||
from ._config import get_run_validators, set_run_validators
|
||||
from ._funcs import asdict, assoc, astuple, evolve, has
|
||||
from ._make import (
|
||||
NOTHING,
|
||||
Attribute,
|
||||
Factory,
|
||||
attrib,
|
||||
attrs,
|
||||
fields,
|
||||
fields_dict,
|
||||
make_class,
|
||||
validate,
|
||||
)
|
||||
|
||||
|
||||
__version__ = "18.2.0"
|
||||
|
||||
__title__ = "attrs"
|
||||
__description__ = "Classes Without Boilerplate"
|
||||
__url__ = "https://www.attrs.org/"
|
||||
__uri__ = __url__
|
||||
__doc__ = __description__ + " <" + __uri__ + ">"
|
||||
|
||||
__author__ = "Hynek Schlawack"
|
||||
__email__ = "hs@ox.cx"
|
||||
|
||||
__license__ = "MIT"
|
||||
__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
|
||||
|
||||
|
||||
s = attributes = attrs
|
||||
ib = attr = attrib
|
||||
dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
|
||||
|
||||
__all__ = [
|
||||
"Attribute",
|
||||
"Factory",
|
||||
"NOTHING",
|
||||
"asdict",
|
||||
"assoc",
|
||||
"astuple",
|
||||
"attr",
|
||||
"attrib",
|
||||
"attributes",
|
||||
"attrs",
|
||||
"converters",
|
||||
"evolve",
|
||||
"exceptions",
|
||||
"fields",
|
||||
"fields_dict",
|
||||
"filters",
|
||||
"get_run_validators",
|
||||
"has",
|
||||
"ib",
|
||||
"make_class",
|
||||
"s",
|
||||
"set_run_validators",
|
||||
"validate",
|
||||
"validators",
|
||||
]
|
||||
@@ -1,252 +0,0 @@
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Generic,
|
||||
List,
|
||||
Optional,
|
||||
Sequence,
|
||||
Mapping,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
|
||||
# `import X as X` is required to make these public
|
||||
from . import exceptions as exceptions
|
||||
from . import filters as filters
|
||||
from . import converters as converters
|
||||
from . import validators as validators
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_C = TypeVar("_C", bound=type)
|
||||
|
||||
_ValidatorType = Callable[[Any, Attribute, _T], Any]
|
||||
_ConverterType = Callable[[Any], _T]
|
||||
_FilterType = Callable[[Attribute, Any], bool]
|
||||
# FIXME: in reality, if multiple validators are passed they must be in a list or tuple,
|
||||
# but those are invariant and so would prevent subtypes of _ValidatorType from working
|
||||
# when passed in a list or tuple.
|
||||
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
|
||||
|
||||
# _make --
|
||||
|
||||
NOTHING: object
|
||||
|
||||
# NOTE: Factory lies about its return type to make this possible: `x: List[int] = Factory(list)`
|
||||
# Work around mypy issue #4554 in the common case by using an overload.
|
||||
@overload
|
||||
def Factory(factory: Callable[[], _T]) -> _T: ...
|
||||
@overload
|
||||
def Factory(
|
||||
factory: Union[Callable[[Any], _T], Callable[[], _T]],
|
||||
takes_self: bool = ...,
|
||||
) -> _T: ...
|
||||
|
||||
class Attribute(Generic[_T]):
|
||||
name: str
|
||||
default: Optional[_T]
|
||||
validator: Optional[_ValidatorType[_T]]
|
||||
repr: bool
|
||||
cmp: bool
|
||||
hash: Optional[bool]
|
||||
init: bool
|
||||
converter: Optional[_ConverterType[_T]]
|
||||
metadata: Dict[Any, Any]
|
||||
type: Optional[Type[_T]]
|
||||
kw_only: bool
|
||||
def __lt__(self, x: Attribute) -> bool: ...
|
||||
def __le__(self, x: Attribute) -> bool: ...
|
||||
def __gt__(self, x: Attribute) -> bool: ...
|
||||
def __ge__(self, x: Attribute) -> bool: ...
|
||||
|
||||
# NOTE: We had several choices for the annotation to use for type arg:
|
||||
# 1) Type[_T]
|
||||
# - Pros: Handles simple cases correctly
|
||||
# - Cons: Might produce less informative errors in the case of conflicting TypeVars
|
||||
# e.g. `attr.ib(default='bad', type=int)`
|
||||
# 2) Callable[..., _T]
|
||||
# - Pros: Better error messages than #1 for conflicting TypeVars
|
||||
# - Cons: Terrible error messages for validator checks.
|
||||
# e.g. attr.ib(type=int, validator=validate_str)
|
||||
# -> error: Cannot infer function type argument
|
||||
# 3) type (and do all of the work in the mypy plugin)
|
||||
# - Pros: Simple here, and we could customize the plugin with our own errors.
|
||||
# - Cons: Would need to write mypy plugin code to handle all the cases.
|
||||
# We chose option #1.
|
||||
|
||||
# `attr` lies about its return type to make the following possible:
|
||||
# attr() -> Any
|
||||
# attr(8) -> int
|
||||
# attr(validator=<some callable>) -> Whatever the callable expects.
|
||||
# This makes this type of assignments possible:
|
||||
# x: int = attr(8)
|
||||
#
|
||||
# This form catches explicit None or no default but with no other arguments returns Any.
|
||||
@overload
|
||||
def attrib(
|
||||
default: None = ...,
|
||||
validator: None = ...,
|
||||
repr: bool = ...,
|
||||
cmp: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
convert: None = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
type: None = ...,
|
||||
converter: None = ...,
|
||||
factory: None = ...,
|
||||
kw_only: bool = ...,
|
||||
) -> Any: ...
|
||||
|
||||
# This form catches an explicit None or no default and infers the type from the other arguments.
|
||||
@overload
|
||||
def attrib(
|
||||
default: None = ...,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
convert: Optional[_ConverterType[_T]] = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
type: Optional[Type[_T]] = ...,
|
||||
converter: Optional[_ConverterType[_T]] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form catches an explicit default argument.
|
||||
@overload
|
||||
def attrib(
|
||||
default: _T,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
convert: Optional[_ConverterType[_T]] = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
type: Optional[Type[_T]] = ...,
|
||||
converter: Optional[_ConverterType[_T]] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form covers type=non-Type: e.g. forward references (str), Any
|
||||
@overload
|
||||
def attrib(
|
||||
default: Optional[_T] = ...,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
convert: Optional[_ConverterType[_T]] = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
type: object = ...,
|
||||
converter: Optional[_ConverterType[_T]] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
) -> Any: ...
|
||||
@overload
|
||||
def attrs(
|
||||
maybe_cls: _C,
|
||||
these: Optional[Dict[str, Any]] = ...,
|
||||
repr_ns: Optional[str] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
) -> _C: ...
|
||||
@overload
|
||||
def attrs(
|
||||
maybe_cls: None = ...,
|
||||
these: Optional[Dict[str, Any]] = ...,
|
||||
repr_ns: Optional[str] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
) -> Callable[[_C], _C]: ...
|
||||
|
||||
# TODO: add support for returning NamedTuple from the mypy plugin
|
||||
class _Fields(Tuple[Attribute, ...]):
|
||||
def __getattr__(self, name: str) -> Attribute: ...
|
||||
|
||||
def fields(cls: type) -> _Fields: ...
|
||||
def fields_dict(cls: type) -> Dict[str, Attribute]: ...
|
||||
def validate(inst: Any) -> None: ...
|
||||
|
||||
# TODO: add support for returning a proper attrs class from the mypy plugin
|
||||
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid
|
||||
def make_class(
|
||||
name: str,
|
||||
attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
|
||||
bases: Tuple[type, ...] = ...,
|
||||
repr_ns: Optional[str] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
) -> type: ...
|
||||
|
||||
# _funcs --
|
||||
|
||||
# TODO: add support for returning TypedDict from the mypy plugin
|
||||
# FIXME: asdict/astuple do not honor their factory args. waiting on one of these:
|
||||
# https://github.com/python/mypy/issues/4236
|
||||
# https://github.com/python/typing/issues/253
|
||||
def asdict(
|
||||
inst: Any,
|
||||
recurse: bool = ...,
|
||||
filter: Optional[_FilterType] = ...,
|
||||
dict_factory: Type[Mapping[Any, Any]] = ...,
|
||||
retain_collection_types: bool = ...,
|
||||
) -> Dict[str, Any]: ...
|
||||
|
||||
# TODO: add support for returning NamedTuple from the mypy plugin
|
||||
def astuple(
|
||||
inst: Any,
|
||||
recurse: bool = ...,
|
||||
filter: Optional[_FilterType] = ...,
|
||||
tuple_factory: Type[Sequence] = ...,
|
||||
retain_collection_types: bool = ...,
|
||||
) -> Tuple[Any, ...]: ...
|
||||
def has(cls: type) -> bool: ...
|
||||
def assoc(inst: _T, **changes: Any) -> _T: ...
|
||||
def evolve(inst: _T, **changes: Any) -> _T: ...
|
||||
|
||||
# _config --
|
||||
|
||||
def set_run_validators(run: bool) -> None: ...
|
||||
def get_run_validators() -> bool: ...
|
||||
|
||||
# aliases --
|
||||
|
||||
s = attributes = attrs
|
||||
ib = attr = attrib
|
||||
dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
|
||||
@@ -1,163 +0,0 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import platform
|
||||
import sys
|
||||
import types
|
||||
import warnings
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PYPY = platform.python_implementation() == "PyPy"
|
||||
|
||||
|
||||
if PYPY or sys.version_info[:2] >= (3, 6):
|
||||
ordered_dict = dict
|
||||
else:
|
||||
from collections import OrderedDict
|
||||
|
||||
ordered_dict = OrderedDict
|
||||
|
||||
|
||||
if PY2:
|
||||
from UserDict import IterableUserDict
|
||||
|
||||
# We 'bundle' isclass instead of using inspect as importing inspect is
|
||||
# fairly expensive (order of 10-15 ms for a modern machine in 2016)
|
||||
def isclass(klass):
|
||||
return isinstance(klass, (type, types.ClassType))
|
||||
|
||||
# TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
|
||||
TYPE = "type"
|
||||
|
||||
def iteritems(d):
|
||||
return d.iteritems()
|
||||
|
||||
# Python 2 is bereft of a read-only dict proxy, so we make one!
|
||||
class ReadOnlyDict(IterableUserDict):
|
||||
"""
|
||||
Best-effort read-only dict wrapper.
|
||||
"""
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise TypeError(
|
||||
"'mappingproxy' object does not support item assignment"
|
||||
)
|
||||
|
||||
def update(self, _):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'update'"
|
||||
)
|
||||
|
||||
def __delitem__(self, _):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise TypeError(
|
||||
"'mappingproxy' object does not support item deletion"
|
||||
)
|
||||
|
||||
def clear(self):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'clear'"
|
||||
)
|
||||
|
||||
def pop(self, key, default=None):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'pop'"
|
||||
)
|
||||
|
||||
def popitem(self):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'popitem'"
|
||||
)
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'setdefault'"
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
# Override to be identical to the Python 3 version.
|
||||
return "mappingproxy(" + repr(self.data) + ")"
|
||||
|
||||
def metadata_proxy(d):
|
||||
res = ReadOnlyDict()
|
||||
res.data.update(d) # We blocked update, so we have to do it like this.
|
||||
return res
|
||||
|
||||
|
||||
else:
|
||||
|
||||
def isclass(klass):
|
||||
return isinstance(klass, type)
|
||||
|
||||
TYPE = "class"
|
||||
|
||||
def iteritems(d):
|
||||
return d.items()
|
||||
|
||||
def metadata_proxy(d):
|
||||
return types.MappingProxyType(dict(d))
|
||||
|
||||
|
||||
def import_ctypes():
|
||||
"""
|
||||
Moved into a function for testability.
|
||||
"""
|
||||
import ctypes
|
||||
|
||||
return ctypes
|
||||
|
||||
|
||||
if not PY2:
|
||||
|
||||
def just_warn(*args, **kw):
|
||||
"""
|
||||
We only warn on Python 3 because we are not aware of any concrete
|
||||
consequences of not setting the cell on Python 2.
|
||||
"""
|
||||
warnings.warn(
|
||||
"Missing ctypes. Some features like bare super() or accessing "
|
||||
"__class__ will not work with slots classes.",
|
||||
RuntimeWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
|
||||
else:
|
||||
|
||||
def just_warn(*args, **kw): # pragma: nocover
|
||||
"""
|
||||
We only warn on Python 3 because we are not aware of any concrete
|
||||
consequences of not setting the cell on Python 2.
|
||||
"""
|
||||
|
||||
|
||||
def make_set_closure_cell():
|
||||
"""
|
||||
Moved into a function for testability.
|
||||
"""
|
||||
if PYPY: # pragma: no cover
|
||||
|
||||
def set_closure_cell(cell, value):
|
||||
cell.__setstate__((value,))
|
||||
|
||||
else:
|
||||
try:
|
||||
ctypes = import_ctypes()
|
||||
|
||||
set_closure_cell = ctypes.pythonapi.PyCell_Set
|
||||
set_closure_cell.argtypes = (ctypes.py_object, ctypes.py_object)
|
||||
set_closure_cell.restype = ctypes.c_int
|
||||
except Exception:
|
||||
# We try best effort to set the cell, but sometimes it's not
|
||||
# possible. For example on Jython or on GAE.
|
||||
set_closure_cell = just_warn
|
||||
return set_closure_cell
|
||||
|
||||
|
||||
set_closure_cell = make_set_closure_cell()
|
||||
@@ -1,23 +0,0 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
__all__ = ["set_run_validators", "get_run_validators"]
|
||||
|
||||
_run_validators = True
|
||||
|
||||
|
||||
def set_run_validators(run):
|
||||
"""
|
||||
Set whether or not validators are run. By default, they are run.
|
||||
"""
|
||||
if not isinstance(run, bool):
|
||||
raise TypeError("'run' must be bool.")
|
||||
global _run_validators
|
||||
_run_validators = run
|
||||
|
||||
|
||||
def get_run_validators():
|
||||
"""
|
||||
Return whether or not validators are run.
|
||||
"""
|
||||
return _run_validators
|
||||
@@ -1,290 +0,0 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import copy
|
||||
|
||||
from ._compat import iteritems
|
||||
from ._make import NOTHING, _obj_setattr, fields
|
||||
from .exceptions import AttrsAttributeNotFoundError
|
||||
|
||||
|
||||
def asdict(
|
||||
inst,
|
||||
recurse=True,
|
||||
filter=None,
|
||||
dict_factory=dict,
|
||||
retain_collection_types=False,
|
||||
):
|
||||
"""
|
||||
Return the ``attrs`` attribute values of *inst* as a dict.
|
||||
|
||||
Optionally recurse into other ``attrs``-decorated classes.
|
||||
|
||||
:param inst: Instance of an ``attrs``-decorated class.
|
||||
:param bool recurse: Recurse into classes that are also
|
||||
``attrs``-decorated.
|
||||
:param callable filter: A callable whose return code determines whether an
|
||||
attribute or element is included (``True``) or dropped (``False``). Is
|
||||
called with the :class:`attr.Attribute` as the first argument and the
|
||||
value as the second argument.
|
||||
:param callable dict_factory: A callable to produce dictionaries from. For
|
||||
example, to produce ordered dictionaries instead of normal Python
|
||||
dictionaries, pass in ``collections.OrderedDict``.
|
||||
:param bool retain_collection_types: Do not convert to ``list`` when
|
||||
encountering an attribute whose type is ``tuple`` or ``set``. Only
|
||||
meaningful if ``recurse`` is ``True``.
|
||||
|
||||
:rtype: return type of *dict_factory*
|
||||
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class.
|
||||
|
||||
.. versionadded:: 16.0.0 *dict_factory*
|
||||
.. versionadded:: 16.1.0 *retain_collection_types*
|
||||
"""
|
||||
attrs = fields(inst.__class__)
|
||||
rv = dict_factory()
|
||||
for a in attrs:
|
||||
v = getattr(inst, a.name)
|
||||
if filter is not None and not filter(a, v):
|
||||
continue
|
||||
if recurse is True:
|
||||
if has(v.__class__):
|
||||
rv[a.name] = asdict(
|
||||
v, True, filter, dict_factory, retain_collection_types
|
||||
)
|
||||
elif isinstance(v, (tuple, list, set)):
|
||||
cf = v.__class__ if retain_collection_types is True else list
|
||||
rv[a.name] = cf(
|
||||
[
|
||||
_asdict_anything(
|
||||
i, filter, dict_factory, retain_collection_types
|
||||
)
|
||||
for i in v
|
||||
]
|
||||
)
|
||||
elif isinstance(v, dict):
|
||||
df = dict_factory
|
||||
rv[a.name] = df(
|
||||
(
|
||||
_asdict_anything(
|
||||
kk, filter, df, retain_collection_types
|
||||
),
|
||||
_asdict_anything(
|
||||
vv, filter, df, retain_collection_types
|
||||
),
|
||||
)
|
||||
for kk, vv in iteritems(v)
|
||||
)
|
||||
else:
|
||||
rv[a.name] = v
|
||||
else:
|
||||
rv[a.name] = v
|
||||
return rv
|
||||
|
||||
|
||||
def _asdict_anything(val, filter, dict_factory, retain_collection_types):
|
||||
"""
|
||||
``asdict`` only works on attrs instances, this works on anything.
|
||||
"""
|
||||
if getattr(val.__class__, "__attrs_attrs__", None) is not None:
|
||||
# Attrs class.
|
||||
rv = asdict(val, True, filter, dict_factory, retain_collection_types)
|
||||
elif isinstance(val, (tuple, list, set)):
|
||||
cf = val.__class__ if retain_collection_types is True else list
|
||||
rv = cf(
|
||||
[
|
||||
_asdict_anything(
|
||||
i, filter, dict_factory, retain_collection_types
|
||||
)
|
||||
for i in val
|
||||
]
|
||||
)
|
||||
elif isinstance(val, dict):
|
||||
df = dict_factory
|
||||
rv = df(
|
||||
(
|
||||
_asdict_anything(kk, filter, df, retain_collection_types),
|
||||
_asdict_anything(vv, filter, df, retain_collection_types),
|
||||
)
|
||||
for kk, vv in iteritems(val)
|
||||
)
|
||||
else:
|
||||
rv = val
|
||||
return rv
|
||||
|
||||
|
||||
def astuple(
|
||||
inst,
|
||||
recurse=True,
|
||||
filter=None,
|
||||
tuple_factory=tuple,
|
||||
retain_collection_types=False,
|
||||
):
|
||||
"""
|
||||
Return the ``attrs`` attribute values of *inst* as a tuple.
|
||||
|
||||
Optionally recurse into other ``attrs``-decorated classes.
|
||||
|
||||
:param inst: Instance of an ``attrs``-decorated class.
|
||||
:param bool recurse: Recurse into classes that are also
|
||||
``attrs``-decorated.
|
||||
:param callable filter: A callable whose return code determines whether an
|
||||
attribute or element is included (``True``) or dropped (``False``). Is
|
||||
called with the :class:`attr.Attribute` as the first argument and the
|
||||
value as the second argument.
|
||||
:param callable tuple_factory: A callable to produce tuples from. For
|
||||
example, to produce lists instead of tuples.
|
||||
:param bool retain_collection_types: Do not convert to ``list``
|
||||
or ``dict`` when encountering an attribute which type is
|
||||
``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
|
||||
``True``.
|
||||
|
||||
:rtype: return type of *tuple_factory*
|
||||
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class.
|
||||
|
||||
.. versionadded:: 16.2.0
|
||||
"""
|
||||
attrs = fields(inst.__class__)
|
||||
rv = []
|
||||
retain = retain_collection_types # Very long. :/
|
||||
for a in attrs:
|
||||
v = getattr(inst, a.name)
|
||||
if filter is not None and not filter(a, v):
|
||||
continue
|
||||
if recurse is True:
|
||||
if has(v.__class__):
|
||||
rv.append(
|
||||
astuple(
|
||||
v,
|
||||
recurse=True,
|
||||
filter=filter,
|
||||
tuple_factory=tuple_factory,
|
||||
retain_collection_types=retain,
|
||||
)
|
||||
)
|
||||
elif isinstance(v, (tuple, list, set)):
|
||||
cf = v.__class__ if retain is True else list
|
||||
rv.append(
|
||||
cf(
|
||||
[
|
||||
astuple(
|
||||
j,
|
||||
recurse=True,
|
||||
filter=filter,
|
||||
tuple_factory=tuple_factory,
|
||||
retain_collection_types=retain,
|
||||
)
|
||||
if has(j.__class__)
|
||||
else j
|
||||
for j in v
|
||||
]
|
||||
)
|
||||
)
|
||||
elif isinstance(v, dict):
|
||||
df = v.__class__ if retain is True else dict
|
||||
rv.append(
|
||||
df(
|
||||
(
|
||||
astuple(
|
||||
kk,
|
||||
tuple_factory=tuple_factory,
|
||||
retain_collection_types=retain,
|
||||
)
|
||||
if has(kk.__class__)
|
||||
else kk,
|
||||
astuple(
|
||||
vv,
|
||||
tuple_factory=tuple_factory,
|
||||
retain_collection_types=retain,
|
||||
)
|
||||
if has(vv.__class__)
|
||||
else vv,
|
||||
)
|
||||
for kk, vv in iteritems(v)
|
||||
)
|
||||
)
|
||||
else:
|
||||
rv.append(v)
|
||||
else:
|
||||
rv.append(v)
|
||||
return rv if tuple_factory is list else tuple_factory(rv)
|
||||
|
||||
|
||||
def has(cls):
|
||||
"""
|
||||
Check whether *cls* is a class with ``attrs`` attributes.
|
||||
|
||||
:param type cls: Class to introspect.
|
||||
:raise TypeError: If *cls* is not a class.
|
||||
|
||||
:rtype: :class:`bool`
|
||||
"""
|
||||
return getattr(cls, "__attrs_attrs__", None) is not None
|
||||
|
||||
|
||||
def assoc(inst, **changes):
|
||||
"""
|
||||
Copy *inst* and apply *changes*.
|
||||
|
||||
:param inst: Instance of a class with ``attrs`` attributes.
|
||||
:param changes: Keyword changes in the new copy.
|
||||
|
||||
:return: A copy of inst with *changes* incorporated.
|
||||
|
||||
:raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
|
||||
be found on *cls*.
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class.
|
||||
|
||||
.. deprecated:: 17.1.0
|
||||
Use :func:`evolve` instead.
|
||||
"""
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"assoc is deprecated and will be removed after 2018/01.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
new = copy.copy(inst)
|
||||
attrs = fields(inst.__class__)
|
||||
for k, v in iteritems(changes):
|
||||
a = getattr(attrs, k, NOTHING)
|
||||
if a is NOTHING:
|
||||
raise AttrsAttributeNotFoundError(
|
||||
"{k} is not an attrs attribute on {cl}.".format(
|
||||
k=k, cl=new.__class__
|
||||
)
|
||||
)
|
||||
_obj_setattr(new, k, v)
|
||||
return new
|
||||
|
||||
|
||||
def evolve(inst, **changes):
|
||||
"""
|
||||
Create a new instance, based on *inst* with *changes* applied.
|
||||
|
||||
:param inst: Instance of a class with ``attrs`` attributes.
|
||||
:param changes: Keyword changes in the new copy.
|
||||
|
||||
:return: A copy of inst with *changes* incorporated.
|
||||
|
||||
:raise TypeError: If *attr_name* couldn't be found in the class
|
||||
``__init__``.
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
cls = inst.__class__
|
||||
attrs = fields(cls)
|
||||
for a in attrs:
|
||||
if not a.init:
|
||||
continue
|
||||
attr_name = a.name # To deal with private attributes.
|
||||
init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
|
||||
if init_name not in changes:
|
||||
changes[init_name] = getattr(inst, attr_name)
|
||||
return cls(**changes)
|
||||
2034
python/attr/_make.py
2034
python/attr/_make.py
File diff suppressed because it is too large
Load Diff
@@ -1,78 +0,0 @@
|
||||
"""
|
||||
Commonly useful converters.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from ._make import NOTHING, Factory
|
||||
|
||||
|
||||
def optional(converter):
|
||||
"""
|
||||
A converter that allows an attribute to be optional. An optional attribute
|
||||
is one which can be set to ``None``.
|
||||
|
||||
:param callable converter: the converter that is used for non-``None``
|
||||
values.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
|
||||
def optional_converter(val):
|
||||
if val is None:
|
||||
return None
|
||||
return converter(val)
|
||||
|
||||
return optional_converter
|
||||
|
||||
|
||||
def default_if_none(default=NOTHING, factory=None):
|
||||
"""
|
||||
A converter that allows to replace ``None`` values by *default* or the
|
||||
result of *factory*.
|
||||
|
||||
:param default: Value to be used if ``None`` is passed. Passing an instance
|
||||
of :class:`attr.Factory` is supported, however the ``takes_self`` option
|
||||
is *not*.
|
||||
:param callable factory: A callable that takes not parameters whose result
|
||||
is used if ``None`` is passed.
|
||||
|
||||
:raises TypeError: If **neither** *default* or *factory* is passed.
|
||||
:raises TypeError: If **both** *default* and *factory* are passed.
|
||||
:raises ValueError: If an instance of :class:`attr.Factory` is passed with
|
||||
``takes_self=True``.
|
||||
|
||||
.. versionadded:: 18.2.0
|
||||
"""
|
||||
if default is NOTHING and factory is None:
|
||||
raise TypeError("Must pass either `default` or `factory`.")
|
||||
|
||||
if default is not NOTHING and factory is not None:
|
||||
raise TypeError(
|
||||
"Must pass either `default` or `factory` but not both."
|
||||
)
|
||||
|
||||
if factory is not None:
|
||||
default = Factory(factory)
|
||||
|
||||
if isinstance(default, Factory):
|
||||
if default.takes_self:
|
||||
raise ValueError(
|
||||
"`takes_self` is not supported by default_if_none."
|
||||
)
|
||||
|
||||
def default_if_none_converter(val):
|
||||
if val is not None:
|
||||
return val
|
||||
|
||||
return default.factory()
|
||||
|
||||
else:
|
||||
|
||||
def default_if_none_converter(val):
|
||||
if val is not None:
|
||||
return val
|
||||
|
||||
return default
|
||||
|
||||
return default_if_none_converter
|
||||
@@ -1,12 +0,0 @@
|
||||
from typing import TypeVar, Optional, Callable, overload
|
||||
from . import _ConverterType
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
def optional(
|
||||
converter: _ConverterType[_T]
|
||||
) -> _ConverterType[Optional[_T]]: ...
|
||||
@overload
|
||||
def default_if_none(default: _T) -> _ConverterType[_T]: ...
|
||||
@overload
|
||||
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType[_T]: ...
|
||||
@@ -1,57 +0,0 @@
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
class FrozenInstanceError(AttributeError):
|
||||
"""
|
||||
A frozen/immutable instance has been attempted to be modified.
|
||||
|
||||
It mirrors the behavior of ``namedtuples`` by using the same error message
|
||||
and subclassing :exc:`AttributeError`.
|
||||
|
||||
.. versionadded:: 16.1.0
|
||||
"""
|
||||
|
||||
msg = "can't set attribute"
|
||||
args = [msg]
|
||||
|
||||
|
||||
class AttrsAttributeNotFoundError(ValueError):
|
||||
"""
|
||||
An ``attrs`` function couldn't find an attribute that the user asked for.
|
||||
|
||||
.. versionadded:: 16.2.0
|
||||
"""
|
||||
|
||||
|
||||
class NotAnAttrsClassError(ValueError):
|
||||
"""
|
||||
A non-``attrs`` class has been passed into an ``attrs`` function.
|
||||
|
||||
.. versionadded:: 16.2.0
|
||||
"""
|
||||
|
||||
|
||||
class DefaultAlreadySetError(RuntimeError):
|
||||
"""
|
||||
A default has been set using ``attr.ib()`` and is attempted to be reset
|
||||
using the decorator.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
|
||||
|
||||
class UnannotatedAttributeError(RuntimeError):
|
||||
"""
|
||||
A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
|
||||
annotation.
|
||||
|
||||
.. versionadded:: 17.3.0
|
||||
"""
|
||||
|
||||
|
||||
class PythonTooOldError(RuntimeError):
|
||||
"""
|
||||
An ``attrs`` feature requiring a more recent python version has been used.
|
||||
|
||||
.. versionadded:: 18.2.0
|
||||
"""
|
||||
@@ -1,7 +0,0 @@
|
||||
class FrozenInstanceError(AttributeError):
|
||||
msg: str = ...
|
||||
|
||||
class AttrsAttributeNotFoundError(ValueError): ...
|
||||
class NotAnAttrsClassError(ValueError): ...
|
||||
class DefaultAlreadySetError(RuntimeError): ...
|
||||
class UnannotatedAttributeError(RuntimeError): ...
|
||||
@@ -1,52 +0,0 @@
|
||||
"""
|
||||
Commonly useful filters for :func:`attr.asdict`.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from ._compat import isclass
|
||||
from ._make import Attribute
|
||||
|
||||
|
||||
def _split_what(what):
|
||||
"""
|
||||
Returns a tuple of `frozenset`s of classes and attributes.
|
||||
"""
|
||||
return (
|
||||
frozenset(cls for cls in what if isclass(cls)),
|
||||
frozenset(cls for cls in what if isinstance(cls, Attribute)),
|
||||
)
|
||||
|
||||
|
||||
def include(*what):
|
||||
"""
|
||||
Whitelist *what*.
|
||||
|
||||
:param what: What to whitelist.
|
||||
:type what: :class:`list` of :class:`type` or :class:`attr.Attribute`\\ s
|
||||
|
||||
:rtype: :class:`callable`
|
||||
"""
|
||||
cls, attrs = _split_what(what)
|
||||
|
||||
def include_(attribute, value):
|
||||
return value.__class__ in cls or attribute in attrs
|
||||
|
||||
return include_
|
||||
|
||||
|
||||
def exclude(*what):
|
||||
"""
|
||||
Blacklist *what*.
|
||||
|
||||
:param what: What to blacklist.
|
||||
:type what: :class:`list` of classes or :class:`attr.Attribute`\\ s.
|
||||
|
||||
:rtype: :class:`callable`
|
||||
"""
|
||||
cls, attrs = _split_what(what)
|
||||
|
||||
def exclude_(attribute, value):
|
||||
return value.__class__ not in cls and attribute not in attrs
|
||||
|
||||
return exclude_
|
||||
@@ -1,5 +0,0 @@
|
||||
from typing import Union
|
||||
from . import Attribute, _FilterType
|
||||
|
||||
def include(*what: Union[type, Attribute]) -> _FilterType: ...
|
||||
def exclude(*what: Union[type, Attribute]) -> _FilterType: ...
|
||||
@@ -1,170 +0,0 @@
|
||||
"""
|
||||
Commonly useful validators.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from ._make import _AndValidator, and_, attrib, attrs
|
||||
|
||||
|
||||
__all__ = ["and_", "in_", "instance_of", "optional", "provides"]
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _InstanceOfValidator(object):
|
||||
type = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not isinstance(value, self.type):
|
||||
raise TypeError(
|
||||
"'{name}' must be {type!r} (got {value!r} that is a "
|
||||
"{actual!r}).".format(
|
||||
name=attr.name,
|
||||
type=self.type,
|
||||
actual=value.__class__,
|
||||
value=value,
|
||||
),
|
||||
attr,
|
||||
self.type,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<instance_of validator for type {type!r}>".format(
|
||||
type=self.type
|
||||
)
|
||||
|
||||
|
||||
def instance_of(type):
|
||||
"""
|
||||
A validator that raises a :exc:`TypeError` if the initializer is called
|
||||
with a wrong type for this particular attribute (checks are performed using
|
||||
:func:`isinstance` therefore it's also valid to pass a tuple of types).
|
||||
|
||||
:param type: The type to check for.
|
||||
:type type: type or tuple of types
|
||||
|
||||
:raises TypeError: With a human readable error message, the attribute
|
||||
(of type :class:`attr.Attribute`), the expected type, and the value it
|
||||
got.
|
||||
"""
|
||||
return _InstanceOfValidator(type)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _ProvidesValidator(object):
|
||||
interface = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not self.interface.providedBy(value):
|
||||
raise TypeError(
|
||||
"'{name}' must provide {interface!r} which {value!r} "
|
||||
"doesn't.".format(
|
||||
name=attr.name, interface=self.interface, value=value
|
||||
),
|
||||
attr,
|
||||
self.interface,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<provides validator for interface {interface!r}>".format(
|
||||
interface=self.interface
|
||||
)
|
||||
|
||||
|
||||
def provides(interface):
|
||||
"""
|
||||
A validator that raises a :exc:`TypeError` if the initializer is called
|
||||
with an object that does not provide the requested *interface* (checks are
|
||||
performed using ``interface.providedBy(value)`` (see `zope.interface
|
||||
<https://zopeinterface.readthedocs.io/en/latest/>`_).
|
||||
|
||||
:param zope.interface.Interface interface: The interface to check for.
|
||||
|
||||
:raises TypeError: With a human readable error message, the attribute
|
||||
(of type :class:`attr.Attribute`), the expected interface, and the
|
||||
value it got.
|
||||
"""
|
||||
return _ProvidesValidator(interface)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _OptionalValidator(object):
|
||||
validator = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
if value is None:
|
||||
return
|
||||
|
||||
self.validator(inst, attr, value)
|
||||
|
||||
def __repr__(self):
|
||||
return "<optional validator for {what} or None>".format(
|
||||
what=repr(self.validator)
|
||||
)
|
||||
|
||||
|
||||
def optional(validator):
|
||||
"""
|
||||
A validator that makes an attribute optional. An optional attribute is one
|
||||
which can be set to ``None`` in addition to satisfying the requirements of
|
||||
the sub-validator.
|
||||
|
||||
:param validator: A validator (or a list of validators) that is used for
|
||||
non-``None`` values.
|
||||
:type validator: callable or :class:`list` of callables.
|
||||
|
||||
.. versionadded:: 15.1.0
|
||||
.. versionchanged:: 17.1.0 *validator* can be a list of validators.
|
||||
"""
|
||||
if isinstance(validator, list):
|
||||
return _OptionalValidator(_AndValidator(validator))
|
||||
return _OptionalValidator(validator)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _InValidator(object):
|
||||
options = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
try:
|
||||
in_options = value in self.options
|
||||
except TypeError as e: # e.g. `1 in "abc"`
|
||||
in_options = False
|
||||
|
||||
if not in_options:
|
||||
raise ValueError(
|
||||
"'{name}' must be in {options!r} (got {value!r})".format(
|
||||
name=attr.name, options=self.options, value=value
|
||||
)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<in_ validator with options {options!r}>".format(
|
||||
options=self.options
|
||||
)
|
||||
|
||||
|
||||
def in_(options):
|
||||
"""
|
||||
A validator that raises a :exc:`ValueError` if the initializer is called
|
||||
with a value that does not belong in the options provided. The check is
|
||||
performed using ``value in options``.
|
||||
|
||||
:param options: Allowed options.
|
||||
:type options: list, tuple, :class:`enum.Enum`, ...
|
||||
|
||||
:raises ValueError: With a human readable error message, the attribute (of
|
||||
type :class:`attr.Attribute`), the expected options, and the value it
|
||||
got.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
return _InValidator(options)
|
||||
@@ -1,14 +0,0 @@
|
||||
from typing import Container, List, Union, TypeVar, Type, Any, Optional, Tuple
|
||||
from . import _ValidatorType
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
def instance_of(
|
||||
type: Union[Tuple[Type[_T], ...], Type[_T]]
|
||||
) -> _ValidatorType[_T]: ...
|
||||
def provides(interface: Any) -> _ValidatorType[Any]: ...
|
||||
def optional(
|
||||
validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
|
||||
) -> _ValidatorType[Optional[_T]]: ...
|
||||
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
|
||||
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
|
||||
@@ -1,56 +0,0 @@
|
||||
# Copyright 2016 The Brotli Authors. All rights reserved.
|
||||
#
|
||||
# Distributed under MIT license.
|
||||
# See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
|
||||
"""Functions to compress and decompress data using the Brotli library."""
|
||||
|
||||
import _brotli
|
||||
|
||||
|
||||
# The library version.
|
||||
__version__ = _brotli.__version__
|
||||
|
||||
# The compression mode.
|
||||
MODE_GENERIC = _brotli.MODE_GENERIC
|
||||
MODE_TEXT = _brotli.MODE_TEXT
|
||||
MODE_FONT = _brotli.MODE_FONT
|
||||
|
||||
# The Compressor object.
|
||||
Compressor = _brotli.Compressor
|
||||
|
||||
# The Decompressor object.
|
||||
Decompressor = _brotli.Decompressor
|
||||
|
||||
# Compress a byte string.
|
||||
def compress(string, mode=MODE_GENERIC, quality=11, lgwin=22, lgblock=0):
|
||||
"""Compress a byte string.
|
||||
|
||||
Args:
|
||||
string (bytes): The input data.
|
||||
mode (int, optional): The compression mode can be MODE_GENERIC (default),
|
||||
MODE_TEXT (for UTF-8 format text input) or MODE_FONT (for WOFF 2.0).
|
||||
quality (int, optional): Controls the compression-speed vs compression-
|
||||
density tradeoff. The higher the quality, the slower the compression.
|
||||
Range is 0 to 11. Defaults to 11.
|
||||
lgwin (int, optional): Base 2 logarithm of the sliding window size. Range
|
||||
is 10 to 24. Defaults to 22.
|
||||
lgblock (int, optional): Base 2 logarithm of the maximum input block size.
|
||||
Range is 16 to 24. If set to 0, the value will be set based on the
|
||||
quality. Defaults to 0.
|
||||
|
||||
Returns:
|
||||
The compressed byte string.
|
||||
|
||||
Raises:
|
||||
brotli.error: If arguments are invalid, or compressor fails.
|
||||
"""
|
||||
compressor = Compressor(mode=mode, quality=quality, lgwin=lgwin,
|
||||
lgblock=lgblock)
|
||||
return compressor.process(string) + compressor.finish()
|
||||
|
||||
# Decompress a compressed byte string.
|
||||
decompress = _brotli.decompress
|
||||
|
||||
# Raised if compression or decompression fails.
|
||||
error = _brotli.error
|
||||
@@ -1,97 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
click
|
||||
~~~~~
|
||||
|
||||
Click is a simple Python module inspired by the stdlib optparse to make
|
||||
writing command line scripts fun. Unlike other modules, it's based
|
||||
around a simple API that does not come with too much magic and is
|
||||
composable.
|
||||
|
||||
:copyright: © 2014 by the Pallets team.
|
||||
:license: BSD, see LICENSE.rst for more details.
|
||||
"""
|
||||
|
||||
# Core classes
|
||||
from .core import Context, BaseCommand, Command, MultiCommand, Group, \
|
||||
CommandCollection, Parameter, Option, Argument
|
||||
|
||||
# Globals
|
||||
from .globals import get_current_context
|
||||
|
||||
# Decorators
|
||||
from .decorators import pass_context, pass_obj, make_pass_decorator, \
|
||||
command, group, argument, option, confirmation_option, \
|
||||
password_option, version_option, help_option
|
||||
|
||||
# Types
|
||||
from .types import ParamType, File, Path, Choice, IntRange, Tuple, \
|
||||
DateTime, STRING, INT, FLOAT, BOOL, UUID, UNPROCESSED, FloatRange
|
||||
|
||||
# Utilities
|
||||
from .utils import echo, get_binary_stream, get_text_stream, open_file, \
|
||||
format_filename, get_app_dir, get_os_args
|
||||
|
||||
# Terminal functions
|
||||
from .termui import prompt, confirm, get_terminal_size, echo_via_pager, \
|
||||
progressbar, clear, style, unstyle, secho, edit, launch, getchar, \
|
||||
pause
|
||||
|
||||
# Exceptions
|
||||
from .exceptions import ClickException, UsageError, BadParameter, \
|
||||
FileError, Abort, NoSuchOption, BadOptionUsage, BadArgumentUsage, \
|
||||
MissingParameter
|
||||
|
||||
# Formatting
|
||||
from .formatting import HelpFormatter, wrap_text
|
||||
|
||||
# Parsing
|
||||
from .parser import OptionParser
|
||||
|
||||
|
||||
__all__ = [
|
||||
# Core classes
|
||||
'Context', 'BaseCommand', 'Command', 'MultiCommand', 'Group',
|
||||
'CommandCollection', 'Parameter', 'Option', 'Argument',
|
||||
|
||||
# Globals
|
||||
'get_current_context',
|
||||
|
||||
# Decorators
|
||||
'pass_context', 'pass_obj', 'make_pass_decorator', 'command', 'group',
|
||||
'argument', 'option', 'confirmation_option', 'password_option',
|
||||
'version_option', 'help_option',
|
||||
|
||||
# Types
|
||||
'ParamType', 'File', 'Path', 'Choice', 'IntRange', 'Tuple',
|
||||
'DateTime', 'STRING', 'INT', 'FLOAT', 'BOOL', 'UUID', 'UNPROCESSED',
|
||||
'FloatRange',
|
||||
|
||||
# Utilities
|
||||
'echo', 'get_binary_stream', 'get_text_stream', 'open_file',
|
||||
'format_filename', 'get_app_dir', 'get_os_args',
|
||||
|
||||
# Terminal functions
|
||||
'prompt', 'confirm', 'get_terminal_size', 'echo_via_pager',
|
||||
'progressbar', 'clear', 'style', 'unstyle', 'secho', 'edit', 'launch',
|
||||
'getchar', 'pause',
|
||||
|
||||
# Exceptions
|
||||
'ClickException', 'UsageError', 'BadParameter', 'FileError',
|
||||
'Abort', 'NoSuchOption', 'BadOptionUsage', 'BadArgumentUsage',
|
||||
'MissingParameter',
|
||||
|
||||
# Formatting
|
||||
'HelpFormatter', 'wrap_text',
|
||||
|
||||
# Parsing
|
||||
'OptionParser',
|
||||
]
|
||||
|
||||
|
||||
# Controls if click should emit the warning about the use of unicode
|
||||
# literals.
|
||||
disable_unicode_literals_warning = False
|
||||
|
||||
|
||||
__version__ = '7.0'
|
||||
@@ -1,293 +0,0 @@
|
||||
import copy
|
||||
import os
|
||||
import re
|
||||
|
||||
from .utils import echo
|
||||
from .parser import split_arg_string
|
||||
from .core import MultiCommand, Option, Argument
|
||||
from .types import Choice
|
||||
|
||||
try:
|
||||
from collections import abc
|
||||
except ImportError:
|
||||
import collections as abc
|
||||
|
||||
WORDBREAK = '='
|
||||
|
||||
# Note, only BASH version 4.4 and later have the nosort option.
|
||||
COMPLETION_SCRIPT_BASH = '''
|
||||
%(complete_func)s() {
|
||||
local IFS=$'\n'
|
||||
COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
|
||||
COMP_CWORD=$COMP_CWORD \\
|
||||
%(autocomplete_var)s=complete $1 ) )
|
||||
return 0
|
||||
}
|
||||
|
||||
%(complete_func)setup() {
|
||||
local COMPLETION_OPTIONS=""
|
||||
local BASH_VERSION_ARR=(${BASH_VERSION//./ })
|
||||
# Only BASH version 4.4 and later have the nosort option.
|
||||
if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] && [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then
|
||||
COMPLETION_OPTIONS="-o nosort"
|
||||
fi
|
||||
|
||||
complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s
|
||||
}
|
||||
|
||||
%(complete_func)setup
|
||||
'''
|
||||
|
||||
COMPLETION_SCRIPT_ZSH = '''
|
||||
%(complete_func)s() {
|
||||
local -a completions
|
||||
local -a completions_with_descriptions
|
||||
local -a response
|
||||
response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\
|
||||
COMP_CWORD=$((CURRENT-1)) \\
|
||||
%(autocomplete_var)s=\"complete_zsh\" \\
|
||||
%(script_names)s )}")
|
||||
|
||||
for key descr in ${(kv)response}; do
|
||||
if [[ "$descr" == "_" ]]; then
|
||||
completions+=("$key")
|
||||
else
|
||||
completions_with_descriptions+=("$key":"$descr")
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$completions_with_descriptions" ]; then
|
||||
_describe -V unsorted completions_with_descriptions -U -Q
|
||||
fi
|
||||
|
||||
if [ -n "$completions" ]; then
|
||||
compadd -U -V unsorted -Q -a completions
|
||||
fi
|
||||
compstate[insert]="automenu"
|
||||
}
|
||||
|
||||
compdef %(complete_func)s %(script_names)s
|
||||
'''
|
||||
|
||||
_invalid_ident_char_re = re.compile(r'[^a-zA-Z0-9_]')
|
||||
|
||||
|
||||
def get_completion_script(prog_name, complete_var, shell):
|
||||
cf_name = _invalid_ident_char_re.sub('', prog_name.replace('-', '_'))
|
||||
script = COMPLETION_SCRIPT_ZSH if shell == 'zsh' else COMPLETION_SCRIPT_BASH
|
||||
return (script % {
|
||||
'complete_func': '_%s_completion' % cf_name,
|
||||
'script_names': prog_name,
|
||||
'autocomplete_var': complete_var,
|
||||
}).strip() + ';'
|
||||
|
||||
|
||||
def resolve_ctx(cli, prog_name, args):
|
||||
"""
|
||||
Parse into a hierarchy of contexts. Contexts are connected through the parent variable.
|
||||
:param cli: command definition
|
||||
:param prog_name: the program that is running
|
||||
:param args: full list of args
|
||||
:return: the final context/command parsed
|
||||
"""
|
||||
ctx = cli.make_context(prog_name, args, resilient_parsing=True)
|
||||
args = ctx.protected_args + ctx.args
|
||||
while args:
|
||||
if isinstance(ctx.command, MultiCommand):
|
||||
if not ctx.command.chain:
|
||||
cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
|
||||
if cmd is None:
|
||||
return ctx
|
||||
ctx = cmd.make_context(cmd_name, args, parent=ctx,
|
||||
resilient_parsing=True)
|
||||
args = ctx.protected_args + ctx.args
|
||||
else:
|
||||
# Walk chained subcommand contexts saving the last one.
|
||||
while args:
|
||||
cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
|
||||
if cmd is None:
|
||||
return ctx
|
||||
sub_ctx = cmd.make_context(cmd_name, args, parent=ctx,
|
||||
allow_extra_args=True,
|
||||
allow_interspersed_args=False,
|
||||
resilient_parsing=True)
|
||||
args = sub_ctx.args
|
||||
ctx = sub_ctx
|
||||
args = sub_ctx.protected_args + sub_ctx.args
|
||||
else:
|
||||
break
|
||||
return ctx
|
||||
|
||||
|
||||
def start_of_option(param_str):
|
||||
"""
|
||||
:param param_str: param_str to check
|
||||
:return: whether or not this is the start of an option declaration (i.e. starts "-" or "--")
|
||||
"""
|
||||
return param_str and param_str[:1] == '-'
|
||||
|
||||
|
||||
def is_incomplete_option(all_args, cmd_param):
|
||||
"""
|
||||
:param all_args: the full original list of args supplied
|
||||
:param cmd_param: the current command paramter
|
||||
:return: whether or not the last option declaration (i.e. starts "-" or "--") is incomplete and
|
||||
corresponds to this cmd_param. In other words whether this cmd_param option can still accept
|
||||
values
|
||||
"""
|
||||
if not isinstance(cmd_param, Option):
|
||||
return False
|
||||
if cmd_param.is_flag:
|
||||
return False
|
||||
last_option = None
|
||||
for index, arg_str in enumerate(reversed([arg for arg in all_args if arg != WORDBREAK])):
|
||||
if index + 1 > cmd_param.nargs:
|
||||
break
|
||||
if start_of_option(arg_str):
|
||||
last_option = arg_str
|
||||
|
||||
return True if last_option and last_option in cmd_param.opts else False
|
||||
|
||||
|
||||
def is_incomplete_argument(current_params, cmd_param):
|
||||
"""
|
||||
:param current_params: the current params and values for this argument as already entered
|
||||
:param cmd_param: the current command parameter
|
||||
:return: whether or not the last argument is incomplete and corresponds to this cmd_param. In
|
||||
other words whether or not the this cmd_param argument can still accept values
|
||||
"""
|
||||
if not isinstance(cmd_param, Argument):
|
||||
return False
|
||||
current_param_values = current_params[cmd_param.name]
|
||||
if current_param_values is None:
|
||||
return True
|
||||
if cmd_param.nargs == -1:
|
||||
return True
|
||||
if isinstance(current_param_values, abc.Iterable) \
|
||||
and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def get_user_autocompletions(ctx, args, incomplete, cmd_param):
|
||||
"""
|
||||
:param ctx: context associated with the parsed command
|
||||
:param args: full list of args
|
||||
:param incomplete: the incomplete text to autocomplete
|
||||
:param cmd_param: command definition
|
||||
:return: all the possible user-specified completions for the param
|
||||
"""
|
||||
results = []
|
||||
if isinstance(cmd_param.type, Choice):
|
||||
# Choices don't support descriptions.
|
||||
results = [(c, None)
|
||||
for c in cmd_param.type.choices if str(c).startswith(incomplete)]
|
||||
elif cmd_param.autocompletion is not None:
|
||||
dynamic_completions = cmd_param.autocompletion(ctx=ctx,
|
||||
args=args,
|
||||
incomplete=incomplete)
|
||||
results = [c if isinstance(c, tuple) else (c, None)
|
||||
for c in dynamic_completions]
|
||||
return results
|
||||
|
||||
|
||||
def get_visible_commands_starting_with(ctx, starts_with):
|
||||
"""
|
||||
:param ctx: context associated with the parsed command
|
||||
:starts_with: string that visible commands must start with.
|
||||
:return: all visible (not hidden) commands that start with starts_with.
|
||||
"""
|
||||
for c in ctx.command.list_commands(ctx):
|
||||
if c.startswith(starts_with):
|
||||
command = ctx.command.get_command(ctx, c)
|
||||
if not command.hidden:
|
||||
yield command
|
||||
|
||||
|
||||
def add_subcommand_completions(ctx, incomplete, completions_out):
|
||||
# Add subcommand completions.
|
||||
if isinstance(ctx.command, MultiCommand):
|
||||
completions_out.extend(
|
||||
[(c.name, c.get_short_help_str()) for c in get_visible_commands_starting_with(ctx, incomplete)])
|
||||
|
||||
# Walk up the context list and add any other completion possibilities from chained commands
|
||||
while ctx.parent is not None:
|
||||
ctx = ctx.parent
|
||||
if isinstance(ctx.command, MultiCommand) and ctx.command.chain:
|
||||
remaining_commands = [c for c in get_visible_commands_starting_with(ctx, incomplete)
|
||||
if c.name not in ctx.protected_args]
|
||||
completions_out.extend([(c.name, c.get_short_help_str()) for c in remaining_commands])
|
||||
|
||||
|
||||
def get_choices(cli, prog_name, args, incomplete):
|
||||
"""
|
||||
:param cli: command definition
|
||||
:param prog_name: the program that is running
|
||||
:param args: full list of args
|
||||
:param incomplete: the incomplete text to autocomplete
|
||||
:return: all the possible completions for the incomplete
|
||||
"""
|
||||
all_args = copy.deepcopy(args)
|
||||
|
||||
ctx = resolve_ctx(cli, prog_name, args)
|
||||
if ctx is None:
|
||||
return []
|
||||
|
||||
# In newer versions of bash long opts with '='s are partitioned, but it's easier to parse
|
||||
# without the '='
|
||||
if start_of_option(incomplete) and WORDBREAK in incomplete:
|
||||
partition_incomplete = incomplete.partition(WORDBREAK)
|
||||
all_args.append(partition_incomplete[0])
|
||||
incomplete = partition_incomplete[2]
|
||||
elif incomplete == WORDBREAK:
|
||||
incomplete = ''
|
||||
|
||||
completions = []
|
||||
if start_of_option(incomplete):
|
||||
# completions for partial options
|
||||
for param in ctx.command.params:
|
||||
if isinstance(param, Option) and not param.hidden:
|
||||
param_opts = [param_opt for param_opt in param.opts +
|
||||
param.secondary_opts if param_opt not in all_args or param.multiple]
|
||||
completions.extend([(o, param.help) for o in param_opts if o.startswith(incomplete)])
|
||||
return completions
|
||||
# completion for option values from user supplied values
|
||||
for param in ctx.command.params:
|
||||
if is_incomplete_option(all_args, param):
|
||||
return get_user_autocompletions(ctx, all_args, incomplete, param)
|
||||
# completion for argument values from user supplied values
|
||||
for param in ctx.command.params:
|
||||
if is_incomplete_argument(ctx.params, param):
|
||||
return get_user_autocompletions(ctx, all_args, incomplete, param)
|
||||
|
||||
add_subcommand_completions(ctx, incomplete, completions)
|
||||
# Sort before returning so that proper ordering can be enforced in custom types.
|
||||
return sorted(completions)
|
||||
|
||||
|
||||
def do_complete(cli, prog_name, include_descriptions):
|
||||
cwords = split_arg_string(os.environ['COMP_WORDS'])
|
||||
cword = int(os.environ['COMP_CWORD'])
|
||||
args = cwords[1:cword]
|
||||
try:
|
||||
incomplete = cwords[cword]
|
||||
except IndexError:
|
||||
incomplete = ''
|
||||
|
||||
for item in get_choices(cli, prog_name, args, incomplete):
|
||||
echo(item[0])
|
||||
if include_descriptions:
|
||||
# ZSH has trouble dealing with empty array parameters when returned from commands, so use a well defined character '_' to indicate no description is present.
|
||||
echo(item[1] if item[1] else '_')
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def bashcomplete(cli, prog_name, complete_var, complete_instr):
|
||||
if complete_instr.startswith('source'):
|
||||
shell = 'zsh' if complete_instr == 'source_zsh' else 'bash'
|
||||
echo(get_completion_script(prog_name, complete_var, shell))
|
||||
return True
|
||||
elif complete_instr == 'complete' or complete_instr == 'complete_zsh':
|
||||
return do_complete(cli, prog_name, complete_instr == 'complete_zsh')
|
||||
return False
|
||||
@@ -1,703 +0,0 @@
|
||||
import re
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import codecs
|
||||
from weakref import WeakKeyDictionary
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
CYGWIN = sys.platform.startswith('cygwin')
|
||||
# Determine local App Engine environment, per Google's own suggestion
|
||||
APP_ENGINE = ('APPENGINE_RUNTIME' in os.environ and
|
||||
'Development/' in os.environ['SERVER_SOFTWARE'])
|
||||
WIN = sys.platform.startswith('win') and not APP_ENGINE
|
||||
DEFAULT_COLUMNS = 80
|
||||
|
||||
|
||||
_ansi_re = re.compile(r'\033\[((?:\d|;)*)([a-zA-Z])')
|
||||
|
||||
|
||||
def get_filesystem_encoding():
|
||||
return sys.getfilesystemencoding() or sys.getdefaultencoding()
|
||||
|
||||
|
||||
def _make_text_stream(stream, encoding, errors,
|
||||
force_readable=False, force_writable=False):
|
||||
if encoding is None:
|
||||
encoding = get_best_encoding(stream)
|
||||
if errors is None:
|
||||
errors = 'replace'
|
||||
return _NonClosingTextIOWrapper(stream, encoding, errors,
|
||||
line_buffering=True,
|
||||
force_readable=force_readable,
|
||||
force_writable=force_writable)
|
||||
|
||||
|
||||
def is_ascii_encoding(encoding):
|
||||
"""Checks if a given encoding is ascii."""
|
||||
try:
|
||||
return codecs.lookup(encoding).name == 'ascii'
|
||||
except LookupError:
|
||||
return False
|
||||
|
||||
|
||||
def get_best_encoding(stream):
|
||||
"""Returns the default stream encoding if not found."""
|
||||
rv = getattr(stream, 'encoding', None) or sys.getdefaultencoding()
|
||||
if is_ascii_encoding(rv):
|
||||
return 'utf-8'
|
||||
return rv
|
||||
|
||||
|
||||
class _NonClosingTextIOWrapper(io.TextIOWrapper):
|
||||
|
||||
def __init__(self, stream, encoding, errors,
|
||||
force_readable=False, force_writable=False, **extra):
|
||||
self._stream = stream = _FixupStream(stream, force_readable,
|
||||
force_writable)
|
||||
io.TextIOWrapper.__init__(self, stream, encoding, errors, **extra)
|
||||
|
||||
# The io module is a place where the Python 3 text behavior
|
||||
# was forced upon Python 2, so we need to unbreak
|
||||
# it to look like Python 2.
|
||||
if PY2:
|
||||
def write(self, x):
|
||||
if isinstance(x, str) or is_bytes(x):
|
||||
try:
|
||||
self.flush()
|
||||
except Exception:
|
||||
pass
|
||||
return self.buffer.write(str(x))
|
||||
return io.TextIOWrapper.write(self, x)
|
||||
|
||||
def writelines(self, lines):
|
||||
for line in lines:
|
||||
self.write(line)
|
||||
|
||||
def __del__(self):
|
||||
try:
|
||||
self.detach()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def isatty(self):
|
||||
# https://bitbucket.org/pypy/pypy/issue/1803
|
||||
return self._stream.isatty()
|
||||
|
||||
|
||||
class _FixupStream(object):
|
||||
"""The new io interface needs more from streams than streams
|
||||
traditionally implement. As such, this fix-up code is necessary in
|
||||
some circumstances.
|
||||
|
||||
The forcing of readable and writable flags are there because some tools
|
||||
put badly patched objects on sys (one such offender are certain version
|
||||
of jupyter notebook).
|
||||
"""
|
||||
|
||||
def __init__(self, stream, force_readable=False, force_writable=False):
|
||||
self._stream = stream
|
||||
self._force_readable = force_readable
|
||||
self._force_writable = force_writable
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._stream, name)
|
||||
|
||||
def read1(self, size):
|
||||
f = getattr(self._stream, 'read1', None)
|
||||
if f is not None:
|
||||
return f(size)
|
||||
# We only dispatch to readline instead of read in Python 2 as we
|
||||
# do not want cause problems with the different implementation
|
||||
# of line buffering.
|
||||
if PY2:
|
||||
return self._stream.readline(size)
|
||||
return self._stream.read(size)
|
||||
|
||||
def readable(self):
|
||||
if self._force_readable:
|
||||
return True
|
||||
x = getattr(self._stream, 'readable', None)
|
||||
if x is not None:
|
||||
return x()
|
||||
try:
|
||||
self._stream.read(0)
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
def writable(self):
|
||||
if self._force_writable:
|
||||
return True
|
||||
x = getattr(self._stream, 'writable', None)
|
||||
if x is not None:
|
||||
return x()
|
||||
try:
|
||||
self._stream.write('')
|
||||
except Exception:
|
||||
try:
|
||||
self._stream.write(b'')
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
def seekable(self):
|
||||
x = getattr(self._stream, 'seekable', None)
|
||||
if x is not None:
|
||||
return x()
|
||||
try:
|
||||
self._stream.seek(self._stream.tell())
|
||||
except Exception:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
if PY2:
|
||||
text_type = unicode
|
||||
bytes = str
|
||||
raw_input = raw_input
|
||||
string_types = (str, unicode)
|
||||
int_types = (int, long)
|
||||
iteritems = lambda x: x.iteritems()
|
||||
range_type = xrange
|
||||
|
||||
def is_bytes(x):
|
||||
return isinstance(x, (buffer, bytearray))
|
||||
|
||||
_identifier_re = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')
|
||||
|
||||
# For Windows, we need to force stdout/stdin/stderr to binary if it's
|
||||
# fetched for that. This obviously is not the most correct way to do
|
||||
# it as it changes global state. Unfortunately, there does not seem to
|
||||
# be a clear better way to do it as just reopening the file in binary
|
||||
# mode does not change anything.
|
||||
#
|
||||
# An option would be to do what Python 3 does and to open the file as
|
||||
# binary only, patch it back to the system, and then use a wrapper
|
||||
# stream that converts newlines. It's not quite clear what's the
|
||||
# correct option here.
|
||||
#
|
||||
# This code also lives in _winconsole for the fallback to the console
|
||||
# emulation stream.
|
||||
#
|
||||
# There are also Windows environments where the `msvcrt` module is not
|
||||
# available (which is why we use try-catch instead of the WIN variable
|
||||
# here), such as the Google App Engine development server on Windows. In
|
||||
# those cases there is just nothing we can do.
|
||||
def set_binary_mode(f):
|
||||
return f
|
||||
|
||||
try:
|
||||
import msvcrt
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
def set_binary_mode(f):
|
||||
try:
|
||||
fileno = f.fileno()
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
msvcrt.setmode(fileno, os.O_BINARY)
|
||||
return f
|
||||
|
||||
try:
|
||||
import fcntl
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
def set_binary_mode(f):
|
||||
try:
|
||||
fileno = f.fileno()
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
flags = fcntl.fcntl(fileno, fcntl.F_GETFL)
|
||||
fcntl.fcntl(fileno, fcntl.F_SETFL, flags & ~os.O_NONBLOCK)
|
||||
return f
|
||||
|
||||
def isidentifier(x):
|
||||
return _identifier_re.search(x) is not None
|
||||
|
||||
def get_binary_stdin():
|
||||
return set_binary_mode(sys.stdin)
|
||||
|
||||
def get_binary_stdout():
|
||||
_wrap_std_stream('stdout')
|
||||
return set_binary_mode(sys.stdout)
|
||||
|
||||
def get_binary_stderr():
|
||||
_wrap_std_stream('stderr')
|
||||
return set_binary_mode(sys.stderr)
|
||||
|
||||
def get_text_stdin(encoding=None, errors=None):
|
||||
rv = _get_windows_console_stream(sys.stdin, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _make_text_stream(sys.stdin, encoding, errors,
|
||||
force_readable=True)
|
||||
|
||||
def get_text_stdout(encoding=None, errors=None):
|
||||
_wrap_std_stream('stdout')
|
||||
rv = _get_windows_console_stream(sys.stdout, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _make_text_stream(sys.stdout, encoding, errors,
|
||||
force_writable=True)
|
||||
|
||||
def get_text_stderr(encoding=None, errors=None):
|
||||
_wrap_std_stream('stderr')
|
||||
rv = _get_windows_console_stream(sys.stderr, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _make_text_stream(sys.stderr, encoding, errors,
|
||||
force_writable=True)
|
||||
|
||||
def filename_to_ui(value):
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode(get_filesystem_encoding(), 'replace')
|
||||
return value
|
||||
else:
|
||||
import io
|
||||
text_type = str
|
||||
raw_input = input
|
||||
string_types = (str,)
|
||||
int_types = (int,)
|
||||
range_type = range
|
||||
isidentifier = lambda x: x.isidentifier()
|
||||
iteritems = lambda x: iter(x.items())
|
||||
|
||||
def is_bytes(x):
|
||||
return isinstance(x, (bytes, memoryview, bytearray))
|
||||
|
||||
def _is_binary_reader(stream, default=False):
|
||||
try:
|
||||
return isinstance(stream.read(0), bytes)
|
||||
except Exception:
|
||||
return default
|
||||
# This happens in some cases where the stream was already
|
||||
# closed. In this case, we assume the default.
|
||||
|
||||
def _is_binary_writer(stream, default=False):
|
||||
try:
|
||||
stream.write(b'')
|
||||
except Exception:
|
||||
try:
|
||||
stream.write('')
|
||||
return False
|
||||
except Exception:
|
||||
pass
|
||||
return default
|
||||
return True
|
||||
|
||||
def _find_binary_reader(stream):
|
||||
# We need to figure out if the given stream is already binary.
|
||||
# This can happen because the official docs recommend detaching
|
||||
# the streams to get binary streams. Some code might do this, so
|
||||
# we need to deal with this case explicitly.
|
||||
if _is_binary_reader(stream, False):
|
||||
return stream
|
||||
|
||||
buf = getattr(stream, 'buffer', None)
|
||||
|
||||
# Same situation here; this time we assume that the buffer is
|
||||
# actually binary in case it's closed.
|
||||
if buf is not None and _is_binary_reader(buf, True):
|
||||
return buf
|
||||
|
||||
def _find_binary_writer(stream):
|
||||
# We need to figure out if the given stream is already binary.
|
||||
# This can happen because the official docs recommend detatching
|
||||
# the streams to get binary streams. Some code might do this, so
|
||||
# we need to deal with this case explicitly.
|
||||
if _is_binary_writer(stream, False):
|
||||
return stream
|
||||
|
||||
buf = getattr(stream, 'buffer', None)
|
||||
|
||||
# Same situation here; this time we assume that the buffer is
|
||||
# actually binary in case it's closed.
|
||||
if buf is not None and _is_binary_writer(buf, True):
|
||||
return buf
|
||||
|
||||
def _stream_is_misconfigured(stream):
|
||||
"""A stream is misconfigured if its encoding is ASCII."""
|
||||
# If the stream does not have an encoding set, we assume it's set
|
||||
# to ASCII. This appears to happen in certain unittest
|
||||
# environments. It's not quite clear what the correct behavior is
|
||||
# but this at least will force Click to recover somehow.
|
||||
return is_ascii_encoding(getattr(stream, 'encoding', None) or 'ascii')
|
||||
|
||||
def _is_compatible_text_stream(stream, encoding, errors):
|
||||
stream_encoding = getattr(stream, 'encoding', None)
|
||||
stream_errors = getattr(stream, 'errors', None)
|
||||
|
||||
# Perfect match.
|
||||
if stream_encoding == encoding and stream_errors == errors:
|
||||
return True
|
||||
|
||||
# Otherwise, it's only a compatible stream if we did not ask for
|
||||
# an encoding.
|
||||
if encoding is None:
|
||||
return stream_encoding is not None
|
||||
|
||||
return False
|
||||
|
||||
def _force_correct_text_reader(text_reader, encoding, errors,
|
||||
force_readable=False):
|
||||
if _is_binary_reader(text_reader, False):
|
||||
binary_reader = text_reader
|
||||
else:
|
||||
# If there is no target encoding set, we need to verify that the
|
||||
# reader is not actually misconfigured.
|
||||
if encoding is None and not _stream_is_misconfigured(text_reader):
|
||||
return text_reader
|
||||
|
||||
if _is_compatible_text_stream(text_reader, encoding, errors):
|
||||
return text_reader
|
||||
|
||||
# If the reader has no encoding, we try to find the underlying
|
||||
# binary reader for it. If that fails because the environment is
|
||||
# misconfigured, we silently go with the same reader because this
|
||||
# is too common to happen. In that case, mojibake is better than
|
||||
# exceptions.
|
||||
binary_reader = _find_binary_reader(text_reader)
|
||||
if binary_reader is None:
|
||||
return text_reader
|
||||
|
||||
# At this point, we default the errors to replace instead of strict
|
||||
# because nobody handles those errors anyways and at this point
|
||||
# we're so fundamentally fucked that nothing can repair it.
|
||||
if errors is None:
|
||||
errors = 'replace'
|
||||
return _make_text_stream(binary_reader, encoding, errors,
|
||||
force_readable=force_readable)
|
||||
|
||||
def _force_correct_text_writer(text_writer, encoding, errors,
|
||||
force_writable=False):
|
||||
if _is_binary_writer(text_writer, False):
|
||||
binary_writer = text_writer
|
||||
else:
|
||||
# If there is no target encoding set, we need to verify that the
|
||||
# writer is not actually misconfigured.
|
||||
if encoding is None and not _stream_is_misconfigured(text_writer):
|
||||
return text_writer
|
||||
|
||||
if _is_compatible_text_stream(text_writer, encoding, errors):
|
||||
return text_writer
|
||||
|
||||
# If the writer has no encoding, we try to find the underlying
|
||||
# binary writer for it. If that fails because the environment is
|
||||
# misconfigured, we silently go with the same writer because this
|
||||
# is too common to happen. In that case, mojibake is better than
|
||||
# exceptions.
|
||||
binary_writer = _find_binary_writer(text_writer)
|
||||
if binary_writer is None:
|
||||
return text_writer
|
||||
|
||||
# At this point, we default the errors to replace instead of strict
|
||||
# because nobody handles those errors anyways and at this point
|
||||
# we're so fundamentally fucked that nothing can repair it.
|
||||
if errors is None:
|
||||
errors = 'replace'
|
||||
return _make_text_stream(binary_writer, encoding, errors,
|
||||
force_writable=force_writable)
|
||||
|
||||
def get_binary_stdin():
|
||||
reader = _find_binary_reader(sys.stdin)
|
||||
if reader is None:
|
||||
raise RuntimeError('Was not able to determine binary '
|
||||
'stream for sys.stdin.')
|
||||
return reader
|
||||
|
||||
def get_binary_stdout():
|
||||
writer = _find_binary_writer(sys.stdout)
|
||||
if writer is None:
|
||||
raise RuntimeError('Was not able to determine binary '
|
||||
'stream for sys.stdout.')
|
||||
return writer
|
||||
|
||||
def get_binary_stderr():
|
||||
writer = _find_binary_writer(sys.stderr)
|
||||
if writer is None:
|
||||
raise RuntimeError('Was not able to determine binary '
|
||||
'stream for sys.stderr.')
|
||||
return writer
|
||||
|
||||
def get_text_stdin(encoding=None, errors=None):
|
||||
rv = _get_windows_console_stream(sys.stdin, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_reader(sys.stdin, encoding, errors,
|
||||
force_readable=True)
|
||||
|
||||
def get_text_stdout(encoding=None, errors=None):
|
||||
rv = _get_windows_console_stream(sys.stdout, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_writer(sys.stdout, encoding, errors,
|
||||
force_writable=True)
|
||||
|
||||
def get_text_stderr(encoding=None, errors=None):
|
||||
rv = _get_windows_console_stream(sys.stderr, encoding, errors)
|
||||
if rv is not None:
|
||||
return rv
|
||||
return _force_correct_text_writer(sys.stderr, encoding, errors,
|
||||
force_writable=True)
|
||||
|
||||
def filename_to_ui(value):
|
||||
if isinstance(value, bytes):
|
||||
value = value.decode(get_filesystem_encoding(), 'replace')
|
||||
else:
|
||||
value = value.encode('utf-8', 'surrogateescape') \
|
||||
.decode('utf-8', 'replace')
|
||||
return value
|
||||
|
||||
|
||||
def get_streerror(e, default=None):
|
||||
if hasattr(e, 'strerror'):
|
||||
msg = e.strerror
|
||||
else:
|
||||
if default is not None:
|
||||
msg = default
|
||||
else:
|
||||
msg = str(e)
|
||||
if isinstance(msg, bytes):
|
||||
msg = msg.decode('utf-8', 'replace')
|
||||
return msg
|
||||
|
||||
|
||||
def open_stream(filename, mode='r', encoding=None, errors='strict',
|
||||
atomic=False):
|
||||
# Standard streams first. These are simple because they don't need
|
||||
# special handling for the atomic flag. It's entirely ignored.
|
||||
if filename == '-':
|
||||
if any(m in mode for m in ['w', 'a', 'x']):
|
||||
if 'b' in mode:
|
||||
return get_binary_stdout(), False
|
||||
return get_text_stdout(encoding=encoding, errors=errors), False
|
||||
if 'b' in mode:
|
||||
return get_binary_stdin(), False
|
||||
return get_text_stdin(encoding=encoding, errors=errors), False
|
||||
|
||||
# Non-atomic writes directly go out through the regular open functions.
|
||||
if not atomic:
|
||||
if encoding is None:
|
||||
return open(filename, mode), True
|
||||
return io.open(filename, mode, encoding=encoding, errors=errors), True
|
||||
|
||||
# Some usability stuff for atomic writes
|
||||
if 'a' in mode:
|
||||
raise ValueError(
|
||||
'Appending to an existing file is not supported, because that '
|
||||
'would involve an expensive `copy`-operation to a temporary '
|
||||
'file. Open the file in normal `w`-mode and copy explicitly '
|
||||
'if that\'s what you\'re after.'
|
||||
)
|
||||
if 'x' in mode:
|
||||
raise ValueError('Use the `overwrite`-parameter instead.')
|
||||
if 'w' not in mode:
|
||||
raise ValueError('Atomic writes only make sense with `w`-mode.')
|
||||
|
||||
# Atomic writes are more complicated. They work by opening a file
|
||||
# as a proxy in the same folder and then using the fdopen
|
||||
# functionality to wrap it in a Python file. Then we wrap it in an
|
||||
# atomic file that moves the file over on close.
|
||||
import tempfile
|
||||
fd, tmp_filename = tempfile.mkstemp(dir=os.path.dirname(filename),
|
||||
prefix='.__atomic-write')
|
||||
|
||||
if encoding is not None:
|
||||
f = io.open(fd, mode, encoding=encoding, errors=errors)
|
||||
else:
|
||||
f = os.fdopen(fd, mode)
|
||||
|
||||
return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True
|
||||
|
||||
|
||||
# Used in a destructor call, needs extra protection from interpreter cleanup.
|
||||
if hasattr(os, 'replace'):
|
||||
_replace = os.replace
|
||||
_can_replace = True
|
||||
else:
|
||||
_replace = os.rename
|
||||
_can_replace = not WIN
|
||||
|
||||
|
||||
class _AtomicFile(object):
|
||||
|
||||
def __init__(self, f, tmp_filename, real_filename):
|
||||
self._f = f
|
||||
self._tmp_filename = tmp_filename
|
||||
self._real_filename = real_filename
|
||||
self.closed = False
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._real_filename
|
||||
|
||||
def close(self, delete=False):
|
||||
if self.closed:
|
||||
return
|
||||
self._f.close()
|
||||
if not _can_replace:
|
||||
try:
|
||||
os.remove(self._real_filename)
|
||||
except OSError:
|
||||
pass
|
||||
_replace(self._tmp_filename, self._real_filename)
|
||||
self.closed = True
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._f, name)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
self.close(delete=exc_type is not None)
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self._f)
|
||||
|
||||
|
||||
auto_wrap_for_ansi = None
|
||||
colorama = None
|
||||
get_winterm_size = None
|
||||
|
||||
|
||||
def strip_ansi(value):
|
||||
return _ansi_re.sub('', value)
|
||||
|
||||
|
||||
def should_strip_ansi(stream=None, color=None):
|
||||
if color is None:
|
||||
if stream is None:
|
||||
stream = sys.stdin
|
||||
return not isatty(stream)
|
||||
return not color
|
||||
|
||||
|
||||
# If we're on Windows, we provide transparent integration through
|
||||
# colorama. This will make ANSI colors through the echo function
|
||||
# work automatically.
|
||||
if WIN:
|
||||
# Windows has a smaller terminal
|
||||
DEFAULT_COLUMNS = 79
|
||||
|
||||
from ._winconsole import _get_windows_console_stream, _wrap_std_stream
|
||||
|
||||
def _get_argv_encoding():
|
||||
import locale
|
||||
return locale.getpreferredencoding()
|
||||
|
||||
if PY2:
|
||||
def raw_input(prompt=''):
|
||||
sys.stderr.flush()
|
||||
if prompt:
|
||||
stdout = _default_text_stdout()
|
||||
stdout.write(prompt)
|
||||
stdin = _default_text_stdin()
|
||||
return stdin.readline().rstrip('\r\n')
|
||||
|
||||
try:
|
||||
import colorama
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
_ansi_stream_wrappers = WeakKeyDictionary()
|
||||
|
||||
def auto_wrap_for_ansi(stream, color=None):
|
||||
"""This function wraps a stream so that calls through colorama
|
||||
are issued to the win32 console API to recolor on demand. It
|
||||
also ensures to reset the colors if a write call is interrupted
|
||||
to not destroy the console afterwards.
|
||||
"""
|
||||
try:
|
||||
cached = _ansi_stream_wrappers.get(stream)
|
||||
except Exception:
|
||||
cached = None
|
||||
if cached is not None:
|
||||
return cached
|
||||
strip = should_strip_ansi(stream, color)
|
||||
ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
|
||||
rv = ansi_wrapper.stream
|
||||
_write = rv.write
|
||||
|
||||
def _safe_write(s):
|
||||
try:
|
||||
return _write(s)
|
||||
except:
|
||||
ansi_wrapper.reset_all()
|
||||
raise
|
||||
|
||||
rv.write = _safe_write
|
||||
try:
|
||||
_ansi_stream_wrappers[stream] = rv
|
||||
except Exception:
|
||||
pass
|
||||
return rv
|
||||
|
||||
def get_winterm_size():
|
||||
win = colorama.win32.GetConsoleScreenBufferInfo(
|
||||
colorama.win32.STDOUT).srWindow
|
||||
return win.Right - win.Left, win.Bottom - win.Top
|
||||
else:
|
||||
def _get_argv_encoding():
|
||||
return getattr(sys.stdin, 'encoding', None) or get_filesystem_encoding()
|
||||
|
||||
_get_windows_console_stream = lambda *x: None
|
||||
_wrap_std_stream = lambda *x: None
|
||||
|
||||
|
||||
def term_len(x):
|
||||
return len(strip_ansi(x))
|
||||
|
||||
|
||||
def isatty(stream):
|
||||
try:
|
||||
return stream.isatty()
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def _make_cached_stream_func(src_func, wrapper_func):
|
||||
cache = WeakKeyDictionary()
|
||||
def func():
|
||||
stream = src_func()
|
||||
try:
|
||||
rv = cache.get(stream)
|
||||
except Exception:
|
||||
rv = None
|
||||
if rv is not None:
|
||||
return rv
|
||||
rv = wrapper_func()
|
||||
try:
|
||||
stream = src_func() # In case wrapper_func() modified the stream
|
||||
cache[stream] = rv
|
||||
except Exception:
|
||||
pass
|
||||
return rv
|
||||
return func
|
||||
|
||||
|
||||
_default_text_stdin = _make_cached_stream_func(
|
||||
lambda: sys.stdin, get_text_stdin)
|
||||
_default_text_stdout = _make_cached_stream_func(
|
||||
lambda: sys.stdout, get_text_stdout)
|
||||
_default_text_stderr = _make_cached_stream_func(
|
||||
lambda: sys.stderr, get_text_stderr)
|
||||
|
||||
|
||||
binary_streams = {
|
||||
'stdin': get_binary_stdin,
|
||||
'stdout': get_binary_stdout,
|
||||
'stderr': get_binary_stderr,
|
||||
}
|
||||
|
||||
text_streams = {
|
||||
'stdin': get_text_stdin,
|
||||
'stdout': get_text_stdout,
|
||||
'stderr': get_text_stderr,
|
||||
}
|
||||
@@ -1,621 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
click._termui_impl
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains implementations for the termui module. To keep the
|
||||
import time of Click down, some infrequently used functionality is
|
||||
placed in this module and only imported as needed.
|
||||
|
||||
:copyright: © 2014 by the Pallets team.
|
||||
:license: BSD, see LICENSE.rst for more details.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import math
|
||||
import contextlib
|
||||
from ._compat import _default_text_stdout, range_type, PY2, isatty, \
|
||||
open_stream, strip_ansi, term_len, get_best_encoding, WIN, int_types, \
|
||||
CYGWIN
|
||||
from .utils import echo
|
||||
from .exceptions import ClickException
|
||||
|
||||
|
||||
if os.name == 'nt':
|
||||
BEFORE_BAR = '\r'
|
||||
AFTER_BAR = '\n'
|
||||
else:
|
||||
BEFORE_BAR = '\r\033[?25l'
|
||||
AFTER_BAR = '\033[?25h\n'
|
||||
|
||||
|
||||
def _length_hint(obj):
|
||||
"""Returns the length hint of an object."""
|
||||
try:
|
||||
return len(obj)
|
||||
except (AttributeError, TypeError):
|
||||
try:
|
||||
get_hint = type(obj).__length_hint__
|
||||
except AttributeError:
|
||||
return None
|
||||
try:
|
||||
hint = get_hint(obj)
|
||||
except TypeError:
|
||||
return None
|
||||
if hint is NotImplemented or \
|
||||
not isinstance(hint, int_types) or \
|
||||
hint < 0:
|
||||
return None
|
||||
return hint
|
||||
|
||||
|
||||
class ProgressBar(object):
|
||||
|
||||
def __init__(self, iterable, length=None, fill_char='#', empty_char=' ',
|
||||
bar_template='%(bar)s', info_sep=' ', show_eta=True,
|
||||
show_percent=None, show_pos=False, item_show_func=None,
|
||||
label=None, file=None, color=None, width=30):
|
||||
self.fill_char = fill_char
|
||||
self.empty_char = empty_char
|
||||
self.bar_template = bar_template
|
||||
self.info_sep = info_sep
|
||||
self.show_eta = show_eta
|
||||
self.show_percent = show_percent
|
||||
self.show_pos = show_pos
|
||||
self.item_show_func = item_show_func
|
||||
self.label = label or ''
|
||||
if file is None:
|
||||
file = _default_text_stdout()
|
||||
self.file = file
|
||||
self.color = color
|
||||
self.width = width
|
||||
self.autowidth = width == 0
|
||||
|
||||
if length is None:
|
||||
length = _length_hint(iterable)
|
||||
if iterable is None:
|
||||
if length is None:
|
||||
raise TypeError('iterable or length is required')
|
||||
iterable = range_type(length)
|
||||
self.iter = iter(iterable)
|
||||
self.length = length
|
||||
self.length_known = length is not None
|
||||
self.pos = 0
|
||||
self.avg = []
|
||||
self.start = self.last_eta = time.time()
|
||||
self.eta_known = False
|
||||
self.finished = False
|
||||
self.max_width = None
|
||||
self.entered = False
|
||||
self.current_item = None
|
||||
self.is_hidden = not isatty(self.file)
|
||||
self._last_line = None
|
||||
self.short_limit = 0.5
|
||||
|
||||
def __enter__(self):
|
||||
self.entered = True
|
||||
self.render_progress()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
self.render_finish()
|
||||
|
||||
def __iter__(self):
|
||||
if not self.entered:
|
||||
raise RuntimeError('You need to use progress bars in a with block.')
|
||||
self.render_progress()
|
||||
return self.generator()
|
||||
|
||||
def is_fast(self):
|
||||
return time.time() - self.start <= self.short_limit
|
||||
|
||||
def render_finish(self):
|
||||
if self.is_hidden or self.is_fast():
|
||||
return
|
||||
self.file.write(AFTER_BAR)
|
||||
self.file.flush()
|
||||
|
||||
@property
|
||||
def pct(self):
|
||||
if self.finished:
|
||||
return 1.0
|
||||
return min(self.pos / (float(self.length) or 1), 1.0)
|
||||
|
||||
@property
|
||||
def time_per_iteration(self):
|
||||
if not self.avg:
|
||||
return 0.0
|
||||
return sum(self.avg) / float(len(self.avg))
|
||||
|
||||
@property
|
||||
def eta(self):
|
||||
if self.length_known and not self.finished:
|
||||
return self.time_per_iteration * (self.length - self.pos)
|
||||
return 0.0
|
||||
|
||||
def format_eta(self):
|
||||
if self.eta_known:
|
||||
t = int(self.eta)
|
||||
seconds = t % 60
|
||||
t //= 60
|
||||
minutes = t % 60
|
||||
t //= 60
|
||||
hours = t % 24
|
||||
t //= 24
|
||||
if t > 0:
|
||||
days = t
|
||||
return '%dd %02d:%02d:%02d' % (days, hours, minutes, seconds)
|
||||
else:
|
||||
return '%02d:%02d:%02d' % (hours, minutes, seconds)
|
||||
return ''
|
||||
|
||||
def format_pos(self):
|
||||
pos = str(self.pos)
|
||||
if self.length_known:
|
||||
pos += '/%s' % self.length
|
||||
return pos
|
||||
|
||||
def format_pct(self):
|
||||
return ('% 4d%%' % int(self.pct * 100))[1:]
|
||||
|
||||
def format_bar(self):
|
||||
if self.length_known:
|
||||
bar_length = int(self.pct * self.width)
|
||||
bar = self.fill_char * bar_length
|
||||
bar += self.empty_char * (self.width - bar_length)
|
||||
elif self.finished:
|
||||
bar = self.fill_char * self.width
|
||||
else:
|
||||
bar = list(self.empty_char * (self.width or 1))
|
||||
if self.time_per_iteration != 0:
|
||||
bar[int((math.cos(self.pos * self.time_per_iteration)
|
||||
/ 2.0 + 0.5) * self.width)] = self.fill_char
|
||||
bar = ''.join(bar)
|
||||
return bar
|
||||
|
||||
def format_progress_line(self):
|
||||
show_percent = self.show_percent
|
||||
|
||||
info_bits = []
|
||||
if self.length_known and show_percent is None:
|
||||
show_percent = not self.show_pos
|
||||
|
||||
if self.show_pos:
|
||||
info_bits.append(self.format_pos())
|
||||
if show_percent:
|
||||
info_bits.append(self.format_pct())
|
||||
if self.show_eta and self.eta_known and not self.finished:
|
||||
info_bits.append(self.format_eta())
|
||||
if self.item_show_func is not None:
|
||||
item_info = self.item_show_func(self.current_item)
|
||||
if item_info is not None:
|
||||
info_bits.append(item_info)
|
||||
|
||||
return (self.bar_template % {
|
||||
'label': self.label,
|
||||
'bar': self.format_bar(),
|
||||
'info': self.info_sep.join(info_bits)
|
||||
}).rstrip()
|
||||
|
||||
def render_progress(self):
|
||||
from .termui import get_terminal_size
|
||||
|
||||
if self.is_hidden:
|
||||
return
|
||||
|
||||
buf = []
|
||||
# Update width in case the terminal has been resized
|
||||
if self.autowidth:
|
||||
old_width = self.width
|
||||
self.width = 0
|
||||
clutter_length = term_len(self.format_progress_line())
|
||||
new_width = max(0, get_terminal_size()[0] - clutter_length)
|
||||
if new_width < old_width:
|
||||
buf.append(BEFORE_BAR)
|
||||
buf.append(' ' * self.max_width)
|
||||
self.max_width = new_width
|
||||
self.width = new_width
|
||||
|
||||
clear_width = self.width
|
||||
if self.max_width is not None:
|
||||
clear_width = self.max_width
|
||||
|
||||
buf.append(BEFORE_BAR)
|
||||
line = self.format_progress_line()
|
||||
line_len = term_len(line)
|
||||
if self.max_width is None or self.max_width < line_len:
|
||||
self.max_width = line_len
|
||||
|
||||
buf.append(line)
|
||||
buf.append(' ' * (clear_width - line_len))
|
||||
line = ''.join(buf)
|
||||
# Render the line only if it changed.
|
||||
|
||||
if line != self._last_line and not self.is_fast():
|
||||
self._last_line = line
|
||||
echo(line, file=self.file, color=self.color, nl=False)
|
||||
self.file.flush()
|
||||
|
||||
def make_step(self, n_steps):
|
||||
self.pos += n_steps
|
||||
if self.length_known and self.pos >= self.length:
|
||||
self.finished = True
|
||||
|
||||
if (time.time() - self.last_eta) < 1.0:
|
||||
return
|
||||
|
||||
self.last_eta = time.time()
|
||||
|
||||
# self.avg is a rolling list of length <= 7 of steps where steps are
|
||||
# defined as time elapsed divided by the total progress through
|
||||
# self.length.
|
||||
if self.pos:
|
||||
step = (time.time() - self.start) / self.pos
|
||||
else:
|
||||
step = time.time() - self.start
|
||||
|
||||
self.avg = self.avg[-6:] + [step]
|
||||
|
||||
self.eta_known = self.length_known
|
||||
|
||||
def update(self, n_steps):
|
||||
self.make_step(n_steps)
|
||||
self.render_progress()
|
||||
|
||||
def finish(self):
|
||||
self.eta_known = 0
|
||||
self.current_item = None
|
||||
self.finished = True
|
||||
|
||||
def generator(self):
|
||||
"""
|
||||
Returns a generator which yields the items added to the bar during
|
||||
construction, and updates the progress bar *after* the yielded block
|
||||
returns.
|
||||
"""
|
||||
if not self.entered:
|
||||
raise RuntimeError('You need to use progress bars in a with block.')
|
||||
|
||||
if self.is_hidden:
|
||||
for rv in self.iter:
|
||||
yield rv
|
||||
else:
|
||||
for rv in self.iter:
|
||||
self.current_item = rv
|
||||
yield rv
|
||||
self.update(1)
|
||||
self.finish()
|
||||
self.render_progress()
|
||||
|
||||
|
||||
def pager(generator, color=None):
|
||||
"""Decide what method to use for paging through text."""
|
||||
stdout = _default_text_stdout()
|
||||
if not isatty(sys.stdin) or not isatty(stdout):
|
||||
return _nullpager(stdout, generator, color)
|
||||
pager_cmd = (os.environ.get('PAGER', None) or '').strip()
|
||||
if pager_cmd:
|
||||
if WIN:
|
||||
return _tempfilepager(generator, pager_cmd, color)
|
||||
return _pipepager(generator, pager_cmd, color)
|
||||
if os.environ.get('TERM') in ('dumb', 'emacs'):
|
||||
return _nullpager(stdout, generator, color)
|
||||
if WIN or sys.platform.startswith('os2'):
|
||||
return _tempfilepager(generator, 'more <', color)
|
||||
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
|
||||
return _pipepager(generator, 'less', color)
|
||||
|
||||
import tempfile
|
||||
fd, filename = tempfile.mkstemp()
|
||||
os.close(fd)
|
||||
try:
|
||||
if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
|
||||
return _pipepager(generator, 'more', color)
|
||||
return _nullpager(stdout, generator, color)
|
||||
finally:
|
||||
os.unlink(filename)
|
||||
|
||||
|
||||
def _pipepager(generator, cmd, color):
|
||||
"""Page through text by feeding it to another program. Invoking a
|
||||
pager through this might support colors.
|
||||
"""
|
||||
import subprocess
|
||||
env = dict(os.environ)
|
||||
|
||||
# If we're piping to less we might support colors under the
|
||||
# condition that
|
||||
cmd_detail = cmd.rsplit('/', 1)[-1].split()
|
||||
if color is None and cmd_detail[0] == 'less':
|
||||
less_flags = os.environ.get('LESS', '') + ' '.join(cmd_detail[1:])
|
||||
if not less_flags:
|
||||
env['LESS'] = '-R'
|
||||
color = True
|
||||
elif 'r' in less_flags or 'R' in less_flags:
|
||||
color = True
|
||||
|
||||
c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
|
||||
env=env)
|
||||
encoding = get_best_encoding(c.stdin)
|
||||
try:
|
||||
for text in generator:
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
|
||||
c.stdin.write(text.encode(encoding, 'replace'))
|
||||
except (IOError, KeyboardInterrupt):
|
||||
pass
|
||||
else:
|
||||
c.stdin.close()
|
||||
|
||||
# Less doesn't respect ^C, but catches it for its own UI purposes (aborting
|
||||
# search or other commands inside less).
|
||||
#
|
||||
# That means when the user hits ^C, the parent process (click) terminates,
|
||||
# but less is still alive, paging the output and messing up the terminal.
|
||||
#
|
||||
# If the user wants to make the pager exit on ^C, they should set
|
||||
# `LESS='-K'`. It's not our decision to make.
|
||||
while True:
|
||||
try:
|
||||
c.wait()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
else:
|
||||
break
|
||||
|
||||
|
||||
def _tempfilepager(generator, cmd, color):
|
||||
"""Page through text by invoking a program on a temporary file."""
|
||||
import tempfile
|
||||
filename = tempfile.mktemp()
|
||||
# TODO: This never terminates if the passed generator never terminates.
|
||||
text = "".join(generator)
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
encoding = get_best_encoding(sys.stdout)
|
||||
with open_stream(filename, 'wb')[0] as f:
|
||||
f.write(text.encode(encoding))
|
||||
try:
|
||||
os.system(cmd + ' "' + filename + '"')
|
||||
finally:
|
||||
os.unlink(filename)
|
||||
|
||||
|
||||
def _nullpager(stream, generator, color):
|
||||
"""Simply print unformatted text. This is the ultimate fallback."""
|
||||
for text in generator:
|
||||
if not color:
|
||||
text = strip_ansi(text)
|
||||
stream.write(text)
|
||||
|
||||
|
||||
class Editor(object):
|
||||
|
||||
def __init__(self, editor=None, env=None, require_save=True,
|
||||
extension='.txt'):
|
||||
self.editor = editor
|
||||
self.env = env
|
||||
self.require_save = require_save
|
||||
self.extension = extension
|
||||
|
||||
def get_editor(self):
|
||||
if self.editor is not None:
|
||||
return self.editor
|
||||
for key in 'VISUAL', 'EDITOR':
|
||||
rv = os.environ.get(key)
|
||||
if rv:
|
||||
return rv
|
||||
if WIN:
|
||||
return 'notepad'
|
||||
for editor in 'vim', 'nano':
|
||||
if os.system('which %s >/dev/null 2>&1' % editor) == 0:
|
||||
return editor
|
||||
return 'vi'
|
||||
|
||||
def edit_file(self, filename):
|
||||
import subprocess
|
||||
editor = self.get_editor()
|
||||
if self.env:
|
||||
environ = os.environ.copy()
|
||||
environ.update(self.env)
|
||||
else:
|
||||
environ = None
|
||||
try:
|
||||
c = subprocess.Popen('%s "%s"' % (editor, filename),
|
||||
env=environ, shell=True)
|
||||
exit_code = c.wait()
|
||||
if exit_code != 0:
|
||||
raise ClickException('%s: Editing failed!' % editor)
|
||||
except OSError as e:
|
||||
raise ClickException('%s: Editing failed: %s' % (editor, e))
|
||||
|
||||
def edit(self, text):
|
||||
import tempfile
|
||||
|
||||
text = text or ''
|
||||
if text and not text.endswith('\n'):
|
||||
text += '\n'
|
||||
|
||||
fd, name = tempfile.mkstemp(prefix='editor-', suffix=self.extension)
|
||||
try:
|
||||
if WIN:
|
||||
encoding = 'utf-8-sig'
|
||||
text = text.replace('\n', '\r\n')
|
||||
else:
|
||||
encoding = 'utf-8'
|
||||
text = text.encode(encoding)
|
||||
|
||||
f = os.fdopen(fd, 'wb')
|
||||
f.write(text)
|
||||
f.close()
|
||||
timestamp = os.path.getmtime(name)
|
||||
|
||||
self.edit_file(name)
|
||||
|
||||
if self.require_save \
|
||||
and os.path.getmtime(name) == timestamp:
|
||||
return None
|
||||
|
||||
f = open(name, 'rb')
|
||||
try:
|
||||
rv = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
return rv.decode('utf-8-sig').replace('\r\n', '\n')
|
||||
finally:
|
||||
os.unlink(name)
|
||||
|
||||
|
||||
def open_url(url, wait=False, locate=False):
|
||||
import subprocess
|
||||
|
||||
def _unquote_file(url):
|
||||
try:
|
||||
import urllib
|
||||
except ImportError:
|
||||
import urllib
|
||||
if url.startswith('file://'):
|
||||
url = urllib.unquote(url[7:])
|
||||
return url
|
||||
|
||||
if sys.platform == 'darwin':
|
||||
args = ['open']
|
||||
if wait:
|
||||
args.append('-W')
|
||||
if locate:
|
||||
args.append('-R')
|
||||
args.append(_unquote_file(url))
|
||||
null = open('/dev/null', 'w')
|
||||
try:
|
||||
return subprocess.Popen(args, stderr=null).wait()
|
||||
finally:
|
||||
null.close()
|
||||
elif WIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = 'explorer /select,"%s"' % _unquote_file(
|
||||
url.replace('"', ''))
|
||||
else:
|
||||
args = 'start %s "" "%s"' % (
|
||||
wait and '/WAIT' or '', url.replace('"', ''))
|
||||
return os.system(args)
|
||||
elif CYGWIN:
|
||||
if locate:
|
||||
url = _unquote_file(url)
|
||||
args = 'cygstart "%s"' % (os.path.dirname(url).replace('"', ''))
|
||||
else:
|
||||
args = 'cygstart %s "%s"' % (
|
||||
wait and '-w' or '', url.replace('"', ''))
|
||||
return os.system(args)
|
||||
|
||||
try:
|
||||
if locate:
|
||||
url = os.path.dirname(_unquote_file(url)) or '.'
|
||||
else:
|
||||
url = _unquote_file(url)
|
||||
c = subprocess.Popen(['xdg-open', url])
|
||||
if wait:
|
||||
return c.wait()
|
||||
return 0
|
||||
except OSError:
|
||||
if url.startswith(('http://', 'https://')) and not locate and not wait:
|
||||
import webbrowser
|
||||
webbrowser.open(url)
|
||||
return 0
|
||||
return 1
|
||||
|
||||
|
||||
def _translate_ch_to_exc(ch):
|
||||
if ch == u'\x03':
|
||||
raise KeyboardInterrupt()
|
||||
if ch == u'\x04' and not WIN: # Unix-like, Ctrl+D
|
||||
raise EOFError()
|
||||
if ch == u'\x1a' and WIN: # Windows, Ctrl+Z
|
||||
raise EOFError()
|
||||
|
||||
|
||||
if WIN:
|
||||
import msvcrt
|
||||
|
||||
@contextlib.contextmanager
|
||||
def raw_terminal():
|
||||
yield
|
||||
|
||||
def getchar(echo):
|
||||
# The function `getch` will return a bytes object corresponding to
|
||||
# the pressed character. Since Windows 10 build 1803, it will also
|
||||
# return \x00 when called a second time after pressing a regular key.
|
||||
#
|
||||
# `getwch` does not share this probably-bugged behavior. Moreover, it
|
||||
# returns a Unicode object by default, which is what we want.
|
||||
#
|
||||
# Either of these functions will return \x00 or \xe0 to indicate
|
||||
# a special key, and you need to call the same function again to get
|
||||
# the "rest" of the code. The fun part is that \u00e0 is
|
||||
# "latin small letter a with grave", so if you type that on a French
|
||||
# keyboard, you _also_ get a \xe0.
|
||||
# E.g., consider the Up arrow. This returns \xe0 and then \x48. The
|
||||
# resulting Unicode string reads as "a with grave" + "capital H".
|
||||
# This is indistinguishable from when the user actually types
|
||||
# "a with grave" and then "capital H".
|
||||
#
|
||||
# When \xe0 is returned, we assume it's part of a special-key sequence
|
||||
# and call `getwch` again, but that means that when the user types
|
||||
# the \u00e0 character, `getchar` doesn't return until a second
|
||||
# character is typed.
|
||||
# The alternative is returning immediately, but that would mess up
|
||||
# cross-platform handling of arrow keys and others that start with
|
||||
# \xe0. Another option is using `getch`, but then we can't reliably
|
||||
# read non-ASCII characters, because return values of `getch` are
|
||||
# limited to the current 8-bit codepage.
|
||||
#
|
||||
# Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
|
||||
# is doing the right thing in more situations than with `getch`.
|
||||
if echo:
|
||||
func = msvcrt.getwche
|
||||
else:
|
||||
func = msvcrt.getwch
|
||||
|
||||
rv = func()
|
||||
if rv in (u'\x00', u'\xe0'):
|
||||
# \x00 and \xe0 are control characters that indicate special key,
|
||||
# see above.
|
||||
rv += func()
|
||||
_translate_ch_to_exc(rv)
|
||||
return rv
|
||||
else:
|
||||
import tty
|
||||
import termios
|
||||
|
||||
@contextlib.contextmanager
|
||||
def raw_terminal():
|
||||
if not isatty(sys.stdin):
|
||||
f = open('/dev/tty')
|
||||
fd = f.fileno()
|
||||
else:
|
||||
fd = sys.stdin.fileno()
|
||||
f = None
|
||||
try:
|
||||
old_settings = termios.tcgetattr(fd)
|
||||
try:
|
||||
tty.setraw(fd)
|
||||
yield fd
|
||||
finally:
|
||||
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
|
||||
sys.stdout.flush()
|
||||
if f is not None:
|
||||
f.close()
|
||||
except termios.error:
|
||||
pass
|
||||
|
||||
def getchar(echo):
|
||||
with raw_terminal() as fd:
|
||||
ch = os.read(fd, 32)
|
||||
ch = ch.decode(get_best_encoding(sys.stdin), 'replace')
|
||||
if echo and isatty(sys.stdout):
|
||||
sys.stdout.write(ch)
|
||||
_translate_ch_to_exc(ch)
|
||||
return ch
|
||||
@@ -1,38 +0,0 @@
|
||||
import textwrap
|
||||
from contextlib import contextmanager
|
||||
|
||||
|
||||
class TextWrapper(textwrap.TextWrapper):
|
||||
|
||||
def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
|
||||
space_left = max(width - cur_len, 1)
|
||||
|
||||
if self.break_long_words:
|
||||
last = reversed_chunks[-1]
|
||||
cut = last[:space_left]
|
||||
res = last[space_left:]
|
||||
cur_line.append(cut)
|
||||
reversed_chunks[-1] = res
|
||||
elif not cur_line:
|
||||
cur_line.append(reversed_chunks.pop())
|
||||
|
||||
@contextmanager
|
||||
def extra_indent(self, indent):
|
||||
old_initial_indent = self.initial_indent
|
||||
old_subsequent_indent = self.subsequent_indent
|
||||
self.initial_indent += indent
|
||||
self.subsequent_indent += indent
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.initial_indent = old_initial_indent
|
||||
self.subsequent_indent = old_subsequent_indent
|
||||
|
||||
def indent_only(self, text):
|
||||
rv = []
|
||||
for idx, line in enumerate(text.splitlines()):
|
||||
indent = self.initial_indent
|
||||
if idx > 0:
|
||||
indent = self.subsequent_indent
|
||||
rv.append(indent + line)
|
||||
return '\n'.join(rv)
|
||||
@@ -1,125 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
import codecs
|
||||
|
||||
from ._compat import PY2
|
||||
|
||||
|
||||
# If someone wants to vendor click, we want to ensure the
|
||||
# correct package is discovered. Ideally we could use a
|
||||
# relative import here but unfortunately Python does not
|
||||
# support that.
|
||||
click = sys.modules[__name__.rsplit('.', 1)[0]]
|
||||
|
||||
|
||||
def _find_unicode_literals_frame():
|
||||
import __future__
|
||||
if not hasattr(sys, '_getframe'): # not all Python implementations have it
|
||||
return 0
|
||||
frm = sys._getframe(1)
|
||||
idx = 1
|
||||
while frm is not None:
|
||||
if frm.f_globals.get('__name__', '').startswith('click.'):
|
||||
frm = frm.f_back
|
||||
idx += 1
|
||||
elif frm.f_code.co_flags & __future__.unicode_literals.compiler_flag:
|
||||
return idx
|
||||
else:
|
||||
break
|
||||
return 0
|
||||
|
||||
|
||||
def _check_for_unicode_literals():
|
||||
if not __debug__:
|
||||
return
|
||||
if not PY2 or click.disable_unicode_literals_warning:
|
||||
return
|
||||
bad_frame = _find_unicode_literals_frame()
|
||||
if bad_frame <= 0:
|
||||
return
|
||||
from warnings import warn
|
||||
warn(Warning('Click detected the use of the unicode_literals '
|
||||
'__future__ import. This is heavily discouraged '
|
||||
'because it can introduce subtle bugs in your '
|
||||
'code. You should instead use explicit u"" literals '
|
||||
'for your unicode strings. For more information see '
|
||||
'https://click.palletsprojects.com/python3/'),
|
||||
stacklevel=bad_frame)
|
||||
|
||||
|
||||
def _verify_python3_env():
|
||||
"""Ensures that the environment is good for unicode on Python 3."""
|
||||
if PY2:
|
||||
return
|
||||
try:
|
||||
import locale
|
||||
fs_enc = codecs.lookup(locale.getpreferredencoding()).name
|
||||
except Exception:
|
||||
fs_enc = 'ascii'
|
||||
if fs_enc != 'ascii':
|
||||
return
|
||||
|
||||
extra = ''
|
||||
if os.name == 'posix':
|
||||
import subprocess
|
||||
try:
|
||||
rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE).communicate()[0]
|
||||
except OSError:
|
||||
rv = b''
|
||||
good_locales = set()
|
||||
has_c_utf8 = False
|
||||
|
||||
# Make sure we're operating on text here.
|
||||
if isinstance(rv, bytes):
|
||||
rv = rv.decode('ascii', 'replace')
|
||||
|
||||
for line in rv.splitlines():
|
||||
locale = line.strip()
|
||||
if locale.lower().endswith(('.utf-8', '.utf8')):
|
||||
good_locales.add(locale)
|
||||
if locale.lower() in ('c.utf8', 'c.utf-8'):
|
||||
has_c_utf8 = True
|
||||
|
||||
extra += '\n\n'
|
||||
if not good_locales:
|
||||
extra += (
|
||||
'Additional information: on this system no suitable UTF-8\n'
|
||||
'locales were discovered. This most likely requires resolving\n'
|
||||
'by reconfiguring the locale system.'
|
||||
)
|
||||
elif has_c_utf8:
|
||||
extra += (
|
||||
'This system supports the C.UTF-8 locale which is recommended.\n'
|
||||
'You might be able to resolve your issue by exporting the\n'
|
||||
'following environment variables:\n\n'
|
||||
' export LC_ALL=C.UTF-8\n'
|
||||
' export LANG=C.UTF-8'
|
||||
)
|
||||
else:
|
||||
extra += (
|
||||
'This system lists a couple of UTF-8 supporting locales that\n'
|
||||
'you can pick from. The following suitable locales were\n'
|
||||
'discovered: %s'
|
||||
) % ', '.join(sorted(good_locales))
|
||||
|
||||
bad_locale = None
|
||||
for locale in os.environ.get('LC_ALL'), os.environ.get('LANG'):
|
||||
if locale and locale.lower().endswith(('.utf-8', '.utf8')):
|
||||
bad_locale = locale
|
||||
if locale is not None:
|
||||
break
|
||||
if bad_locale is not None:
|
||||
extra += (
|
||||
'\n\nClick discovered that you exported a UTF-8 locale\n'
|
||||
'but the locale system could not pick up from it because\n'
|
||||
'it does not exist. The exported locale is "%s" but it\n'
|
||||
'is not supported'
|
||||
) % bad_locale
|
||||
|
||||
raise RuntimeError(
|
||||
'Click will abort further execution because Python 3 was'
|
||||
' configured to use ASCII as encoding for the environment.'
|
||||
' Consult https://click.palletsprojects.com/en/7.x/python3/ for'
|
||||
' mitigation steps.' + extra
|
||||
)
|
||||
@@ -1,307 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# This module is based on the excellent work by Adam Bartoš who
|
||||
# provided a lot of what went into the implementation here in
|
||||
# the discussion to issue1602 in the Python bug tracker.
|
||||
#
|
||||
# There are some general differences in regards to how this works
|
||||
# compared to the original patches as we do not need to patch
|
||||
# the entire interpreter but just work in our little world of
|
||||
# echo and prmopt.
|
||||
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
import zlib
|
||||
import time
|
||||
import ctypes
|
||||
import msvcrt
|
||||
from ._compat import _NonClosingTextIOWrapper, text_type, PY2
|
||||
from ctypes import byref, POINTER, c_int, c_char, c_char_p, \
|
||||
c_void_p, py_object, c_ssize_t, c_ulong, windll, WINFUNCTYPE
|
||||
try:
|
||||
from ctypes import pythonapi
|
||||
PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
|
||||
PyBuffer_Release = pythonapi.PyBuffer_Release
|
||||
except ImportError:
|
||||
pythonapi = None
|
||||
from ctypes.wintypes import LPWSTR, LPCWSTR
|
||||
|
||||
|
||||
c_ssize_p = POINTER(c_ssize_t)
|
||||
|
||||
kernel32 = windll.kernel32
|
||||
GetStdHandle = kernel32.GetStdHandle
|
||||
ReadConsoleW = kernel32.ReadConsoleW
|
||||
WriteConsoleW = kernel32.WriteConsoleW
|
||||
GetLastError = kernel32.GetLastError
|
||||
GetCommandLineW = WINFUNCTYPE(LPWSTR)(
|
||||
('GetCommandLineW', windll.kernel32))
|
||||
CommandLineToArgvW = WINFUNCTYPE(
|
||||
POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
|
||||
('CommandLineToArgvW', windll.shell32))
|
||||
|
||||
|
||||
STDIN_HANDLE = GetStdHandle(-10)
|
||||
STDOUT_HANDLE = GetStdHandle(-11)
|
||||
STDERR_HANDLE = GetStdHandle(-12)
|
||||
|
||||
|
||||
PyBUF_SIMPLE = 0
|
||||
PyBUF_WRITABLE = 1
|
||||
|
||||
ERROR_SUCCESS = 0
|
||||
ERROR_NOT_ENOUGH_MEMORY = 8
|
||||
ERROR_OPERATION_ABORTED = 995
|
||||
|
||||
STDIN_FILENO = 0
|
||||
STDOUT_FILENO = 1
|
||||
STDERR_FILENO = 2
|
||||
|
||||
EOF = b'\x1a'
|
||||
MAX_BYTES_WRITTEN = 32767
|
||||
|
||||
|
||||
class Py_buffer(ctypes.Structure):
|
||||
_fields_ = [
|
||||
('buf', c_void_p),
|
||||
('obj', py_object),
|
||||
('len', c_ssize_t),
|
||||
('itemsize', c_ssize_t),
|
||||
('readonly', c_int),
|
||||
('ndim', c_int),
|
||||
('format', c_char_p),
|
||||
('shape', c_ssize_p),
|
||||
('strides', c_ssize_p),
|
||||
('suboffsets', c_ssize_p),
|
||||
('internal', c_void_p)
|
||||
]
|
||||
|
||||
if PY2:
|
||||
_fields_.insert(-1, ('smalltable', c_ssize_t * 2))
|
||||
|
||||
|
||||
# On PyPy we cannot get buffers so our ability to operate here is
|
||||
# serverly limited.
|
||||
if pythonapi is None:
|
||||
get_buffer = None
|
||||
else:
|
||||
def get_buffer(obj, writable=False):
|
||||
buf = Py_buffer()
|
||||
flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
|
||||
PyObject_GetBuffer(py_object(obj), byref(buf), flags)
|
||||
try:
|
||||
buffer_type = c_char * buf.len
|
||||
return buffer_type.from_address(buf.buf)
|
||||
finally:
|
||||
PyBuffer_Release(byref(buf))
|
||||
|
||||
|
||||
class _WindowsConsoleRawIOBase(io.RawIOBase):
|
||||
|
||||
def __init__(self, handle):
|
||||
self.handle = handle
|
||||
|
||||
def isatty(self):
|
||||
io.RawIOBase.isatty(self)
|
||||
return True
|
||||
|
||||
|
||||
class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
|
||||
|
||||
def readable(self):
|
||||
return True
|
||||
|
||||
def readinto(self, b):
|
||||
bytes_to_be_read = len(b)
|
||||
if not bytes_to_be_read:
|
||||
return 0
|
||||
elif bytes_to_be_read % 2:
|
||||
raise ValueError('cannot read odd number of bytes from '
|
||||
'UTF-16-LE encoded console')
|
||||
|
||||
buffer = get_buffer(b, writable=True)
|
||||
code_units_to_be_read = bytes_to_be_read // 2
|
||||
code_units_read = c_ulong()
|
||||
|
||||
rv = ReadConsoleW(self.handle, buffer, code_units_to_be_read,
|
||||
byref(code_units_read), None)
|
||||
if GetLastError() == ERROR_OPERATION_ABORTED:
|
||||
# wait for KeyboardInterrupt
|
||||
time.sleep(0.1)
|
||||
if not rv:
|
||||
raise OSError('Windows error: %s' % GetLastError())
|
||||
|
||||
if buffer[0] == EOF:
|
||||
return 0
|
||||
return 2 * code_units_read.value
|
||||
|
||||
|
||||
class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
|
||||
|
||||
def writable(self):
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _get_error_message(errno):
|
||||
if errno == ERROR_SUCCESS:
|
||||
return 'ERROR_SUCCESS'
|
||||
elif errno == ERROR_NOT_ENOUGH_MEMORY:
|
||||
return 'ERROR_NOT_ENOUGH_MEMORY'
|
||||
return 'Windows error %s' % errno
|
||||
|
||||
def write(self, b):
|
||||
bytes_to_be_written = len(b)
|
||||
buf = get_buffer(b)
|
||||
code_units_to_be_written = min(bytes_to_be_written,
|
||||
MAX_BYTES_WRITTEN) // 2
|
||||
code_units_written = c_ulong()
|
||||
|
||||
WriteConsoleW(self.handle, buf, code_units_to_be_written,
|
||||
byref(code_units_written), None)
|
||||
bytes_written = 2 * code_units_written.value
|
||||
|
||||
if bytes_written == 0 and bytes_to_be_written > 0:
|
||||
raise OSError(self._get_error_message(GetLastError()))
|
||||
return bytes_written
|
||||
|
||||
|
||||
class ConsoleStream(object):
|
||||
|
||||
def __init__(self, text_stream, byte_stream):
|
||||
self._text_stream = text_stream
|
||||
self.buffer = byte_stream
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.buffer.name
|
||||
|
||||
def write(self, x):
|
||||
if isinstance(x, text_type):
|
||||
return self._text_stream.write(x)
|
||||
try:
|
||||
self.flush()
|
||||
except Exception:
|
||||
pass
|
||||
return self.buffer.write(x)
|
||||
|
||||
def writelines(self, lines):
|
||||
for line in lines:
|
||||
self.write(line)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._text_stream, name)
|
||||
|
||||
def isatty(self):
|
||||
return self.buffer.isatty()
|
||||
|
||||
def __repr__(self):
|
||||
return '<ConsoleStream name=%r encoding=%r>' % (
|
||||
self.name,
|
||||
self.encoding,
|
||||
)
|
||||
|
||||
|
||||
class WindowsChunkedWriter(object):
|
||||
"""
|
||||
Wraps a stream (such as stdout), acting as a transparent proxy for all
|
||||
attribute access apart from method 'write()' which we wrap to write in
|
||||
limited chunks due to a Windows limitation on binary console streams.
|
||||
"""
|
||||
def __init__(self, wrapped):
|
||||
# double-underscore everything to prevent clashes with names of
|
||||
# attributes on the wrapped stream object.
|
||||
self.__wrapped = wrapped
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.__wrapped, name)
|
||||
|
||||
def write(self, text):
|
||||
total_to_write = len(text)
|
||||
written = 0
|
||||
|
||||
while written < total_to_write:
|
||||
to_write = min(total_to_write - written, MAX_BYTES_WRITTEN)
|
||||
self.__wrapped.write(text[written:written+to_write])
|
||||
written += to_write
|
||||
|
||||
|
||||
_wrapped_std_streams = set()
|
||||
|
||||
|
||||
def _wrap_std_stream(name):
|
||||
# Python 2 & Windows 7 and below
|
||||
if PY2 and sys.getwindowsversion()[:2] <= (6, 1) and name not in _wrapped_std_streams:
|
||||
setattr(sys, name, WindowsChunkedWriter(getattr(sys, name)))
|
||||
_wrapped_std_streams.add(name)
|
||||
|
||||
|
||||
def _get_text_stdin(buffer_stream):
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
|
||||
'utf-16-le', 'strict', line_buffering=True)
|
||||
return ConsoleStream(text_stream, buffer_stream)
|
||||
|
||||
|
||||
def _get_text_stdout(buffer_stream):
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
|
||||
'utf-16-le', 'strict', line_buffering=True)
|
||||
return ConsoleStream(text_stream, buffer_stream)
|
||||
|
||||
|
||||
def _get_text_stderr(buffer_stream):
|
||||
text_stream = _NonClosingTextIOWrapper(
|
||||
io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
|
||||
'utf-16-le', 'strict', line_buffering=True)
|
||||
return ConsoleStream(text_stream, buffer_stream)
|
||||
|
||||
|
||||
if PY2:
|
||||
def _hash_py_argv():
|
||||
return zlib.crc32('\x00'.join(sys.argv[1:]))
|
||||
|
||||
_initial_argv_hash = _hash_py_argv()
|
||||
|
||||
def _get_windows_argv():
|
||||
argc = c_int(0)
|
||||
argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
|
||||
argv = [argv_unicode[i] for i in range(0, argc.value)]
|
||||
|
||||
if not hasattr(sys, 'frozen'):
|
||||
argv = argv[1:]
|
||||
while len(argv) > 0:
|
||||
arg = argv[0]
|
||||
if not arg.startswith('-') or arg == '-':
|
||||
break
|
||||
argv = argv[1:]
|
||||
if arg.startswith(('-c', '-m')):
|
||||
break
|
||||
|
||||
return argv[1:]
|
||||
|
||||
|
||||
_stream_factories = {
|
||||
0: _get_text_stdin,
|
||||
1: _get_text_stdout,
|
||||
2: _get_text_stderr,
|
||||
}
|
||||
|
||||
|
||||
def _get_windows_console_stream(f, encoding, errors):
|
||||
if get_buffer is not None and \
|
||||
encoding in ('utf-16-le', None) \
|
||||
and errors in ('strict', None) and \
|
||||
hasattr(f, 'isatty') and f.isatty():
|
||||
func = _stream_factories.get(f.fileno())
|
||||
if func is not None:
|
||||
if not PY2:
|
||||
f = getattr(f, 'buffer', None)
|
||||
if f is None:
|
||||
return None
|
||||
else:
|
||||
# If we are on Python 2 we need to set the stream that we
|
||||
# deal with to binary mode as otherwise the exercise if a
|
||||
# bit moot. The same problems apply as for
|
||||
# get_binary_stdin and friends from _compat.
|
||||
msvcrt.setmode(f.fileno(), os.O_BINARY)
|
||||
return func(f)
|
||||
1856
python/click/core.py
1856
python/click/core.py
File diff suppressed because it is too large
Load Diff
@@ -1,311 +0,0 @@
|
||||
import sys
|
||||
import inspect
|
||||
|
||||
from functools import update_wrapper
|
||||
|
||||
from ._compat import iteritems
|
||||
from ._unicodefun import _check_for_unicode_literals
|
||||
from .utils import echo
|
||||
from .globals import get_current_context
|
||||
|
||||
|
||||
def pass_context(f):
|
||||
"""Marks a callback as wanting to receive the current context
|
||||
object as first argument.
|
||||
"""
|
||||
def new_func(*args, **kwargs):
|
||||
return f(get_current_context(), *args, **kwargs)
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
|
||||
def pass_obj(f):
|
||||
"""Similar to :func:`pass_context`, but only pass the object on the
|
||||
context onwards (:attr:`Context.obj`). This is useful if that object
|
||||
represents the state of a nested system.
|
||||
"""
|
||||
def new_func(*args, **kwargs):
|
||||
return f(get_current_context().obj, *args, **kwargs)
|
||||
return update_wrapper(new_func, f)
|
||||
|
||||
|
||||
def make_pass_decorator(object_type, ensure=False):
|
||||
"""Given an object type this creates a decorator that will work
|
||||
similar to :func:`pass_obj` but instead of passing the object of the
|
||||
current context, it will find the innermost context of type
|
||||
:func:`object_type`.
|
||||
|
||||
This generates a decorator that works roughly like this::
|
||||
|
||||
from functools import update_wrapper
|
||||
|
||||
def decorator(f):
|
||||
@pass_context
|
||||
def new_func(ctx, *args, **kwargs):
|
||||
obj = ctx.find_object(object_type)
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
return update_wrapper(new_func, f)
|
||||
return decorator
|
||||
|
||||
:param object_type: the type of the object to pass.
|
||||
:param ensure: if set to `True`, a new object will be created and
|
||||
remembered on the context if it's not there yet.
|
||||
"""
|
||||
def decorator(f):
|
||||
def new_func(*args, **kwargs):
|
||||
ctx = get_current_context()
|
||||
if ensure:
|
||||
obj = ctx.ensure_object(object_type)
|
||||
else:
|
||||
obj = ctx.find_object(object_type)
|
||||
if obj is None:
|
||||
raise RuntimeError('Managed to invoke callback without a '
|
||||
'context object of type %r existing'
|
||||
% object_type.__name__)
|
||||
return ctx.invoke(f, obj, *args, **kwargs)
|
||||
return update_wrapper(new_func, f)
|
||||
return decorator
|
||||
|
||||
|
||||
def _make_command(f, name, attrs, cls):
|
||||
if isinstance(f, Command):
|
||||
raise TypeError('Attempted to convert a callback into a '
|
||||
'command twice.')
|
||||
try:
|
||||
params = f.__click_params__
|
||||
params.reverse()
|
||||
del f.__click_params__
|
||||
except AttributeError:
|
||||
params = []
|
||||
help = attrs.get('help')
|
||||
if help is None:
|
||||
help = inspect.getdoc(f)
|
||||
if isinstance(help, bytes):
|
||||
help = help.decode('utf-8')
|
||||
else:
|
||||
help = inspect.cleandoc(help)
|
||||
attrs['help'] = help
|
||||
_check_for_unicode_literals()
|
||||
return cls(name=name or f.__name__.lower().replace('_', '-'),
|
||||
callback=f, params=params, **attrs)
|
||||
|
||||
|
||||
def command(name=None, cls=None, **attrs):
|
||||
r"""Creates a new :class:`Command` and uses the decorated function as
|
||||
callback. This will also automatically attach all decorated
|
||||
:func:`option`\s and :func:`argument`\s as parameters to the command.
|
||||
|
||||
The name of the command defaults to the name of the function. If you
|
||||
want to change that, you can pass the intended name as the first
|
||||
argument.
|
||||
|
||||
All keyword arguments are forwarded to the underlying command class.
|
||||
|
||||
Once decorated the function turns into a :class:`Command` instance
|
||||
that can be invoked as a command line utility or be attached to a
|
||||
command :class:`Group`.
|
||||
|
||||
:param name: the name of the command. This defaults to the function
|
||||
name with underscores replaced by dashes.
|
||||
:param cls: the command class to instantiate. This defaults to
|
||||
:class:`Command`.
|
||||
"""
|
||||
if cls is None:
|
||||
cls = Command
|
||||
def decorator(f):
|
||||
cmd = _make_command(f, name, attrs, cls)
|
||||
cmd.__doc__ = f.__doc__
|
||||
return cmd
|
||||
return decorator
|
||||
|
||||
|
||||
def group(name=None, **attrs):
|
||||
"""Creates a new :class:`Group` with a function as callback. This
|
||||
works otherwise the same as :func:`command` just that the `cls`
|
||||
parameter is set to :class:`Group`.
|
||||
"""
|
||||
attrs.setdefault('cls', Group)
|
||||
return command(name, **attrs)
|
||||
|
||||
|
||||
def _param_memo(f, param):
|
||||
if isinstance(f, Command):
|
||||
f.params.append(param)
|
||||
else:
|
||||
if not hasattr(f, '__click_params__'):
|
||||
f.__click_params__ = []
|
||||
f.__click_params__.append(param)
|
||||
|
||||
|
||||
def argument(*param_decls, **attrs):
|
||||
"""Attaches an argument to the command. All positional arguments are
|
||||
passed as parameter declarations to :class:`Argument`; all keyword
|
||||
arguments are forwarded unchanged (except ``cls``).
|
||||
This is equivalent to creating an :class:`Argument` instance manually
|
||||
and attaching it to the :attr:`Command.params` list.
|
||||
|
||||
:param cls: the argument class to instantiate. This defaults to
|
||||
:class:`Argument`.
|
||||
"""
|
||||
def decorator(f):
|
||||
ArgumentClass = attrs.pop('cls', Argument)
|
||||
_param_memo(f, ArgumentClass(param_decls, **attrs))
|
||||
return f
|
||||
return decorator
|
||||
|
||||
|
||||
def option(*param_decls, **attrs):
|
||||
"""Attaches an option to the command. All positional arguments are
|
||||
passed as parameter declarations to :class:`Option`; all keyword
|
||||
arguments are forwarded unchanged (except ``cls``).
|
||||
This is equivalent to creating an :class:`Option` instance manually
|
||||
and attaching it to the :attr:`Command.params` list.
|
||||
|
||||
:param cls: the option class to instantiate. This defaults to
|
||||
:class:`Option`.
|
||||
"""
|
||||
def decorator(f):
|
||||
# Issue 926, copy attrs, so pre-defined options can re-use the same cls=
|
||||
option_attrs = attrs.copy()
|
||||
|
||||
if 'help' in option_attrs:
|
||||
option_attrs['help'] = inspect.cleandoc(option_attrs['help'])
|
||||
OptionClass = option_attrs.pop('cls', Option)
|
||||
_param_memo(f, OptionClass(param_decls, **option_attrs))
|
||||
return f
|
||||
return decorator
|
||||
|
||||
|
||||
def confirmation_option(*param_decls, **attrs):
|
||||
"""Shortcut for confirmation prompts that can be ignored by passing
|
||||
``--yes`` as parameter.
|
||||
|
||||
This is equivalent to decorating a function with :func:`option` with
|
||||
the following parameters::
|
||||
|
||||
def callback(ctx, param, value):
|
||||
if not value:
|
||||
ctx.abort()
|
||||
|
||||
@click.command()
|
||||
@click.option('--yes', is_flag=True, callback=callback,
|
||||
expose_value=False, prompt='Do you want to continue?')
|
||||
def dropdb():
|
||||
pass
|
||||
"""
|
||||
def decorator(f):
|
||||
def callback(ctx, param, value):
|
||||
if not value:
|
||||
ctx.abort()
|
||||
attrs.setdefault('is_flag', True)
|
||||
attrs.setdefault('callback', callback)
|
||||
attrs.setdefault('expose_value', False)
|
||||
attrs.setdefault('prompt', 'Do you want to continue?')
|
||||
attrs.setdefault('help', 'Confirm the action without prompting.')
|
||||
return option(*(param_decls or ('--yes',)), **attrs)(f)
|
||||
return decorator
|
||||
|
||||
|
||||
def password_option(*param_decls, **attrs):
|
||||
"""Shortcut for password prompts.
|
||||
|
||||
This is equivalent to decorating a function with :func:`option` with
|
||||
the following parameters::
|
||||
|
||||
@click.command()
|
||||
@click.option('--password', prompt=True, confirmation_prompt=True,
|
||||
hide_input=True)
|
||||
def changeadmin(password):
|
||||
pass
|
||||
"""
|
||||
def decorator(f):
|
||||
attrs.setdefault('prompt', True)
|
||||
attrs.setdefault('confirmation_prompt', True)
|
||||
attrs.setdefault('hide_input', True)
|
||||
return option(*(param_decls or ('--password',)), **attrs)(f)
|
||||
return decorator
|
||||
|
||||
|
||||
def version_option(version=None, *param_decls, **attrs):
|
||||
"""Adds a ``--version`` option which immediately ends the program
|
||||
printing out the version number. This is implemented as an eager
|
||||
option that prints the version and exits the program in the callback.
|
||||
|
||||
:param version: the version number to show. If not provided Click
|
||||
attempts an auto discovery via setuptools.
|
||||
:param prog_name: the name of the program (defaults to autodetection)
|
||||
:param message: custom message to show instead of the default
|
||||
(``'%(prog)s, version %(version)s'``)
|
||||
:param others: everything else is forwarded to :func:`option`.
|
||||
"""
|
||||
if version is None:
|
||||
if hasattr(sys, '_getframe'):
|
||||
module = sys._getframe(1).f_globals.get('__name__')
|
||||
else:
|
||||
module = ''
|
||||
|
||||
def decorator(f):
|
||||
prog_name = attrs.pop('prog_name', None)
|
||||
message = attrs.pop('message', '%(prog)s, version %(version)s')
|
||||
|
||||
def callback(ctx, param, value):
|
||||
if not value or ctx.resilient_parsing:
|
||||
return
|
||||
prog = prog_name
|
||||
if prog is None:
|
||||
prog = ctx.find_root().info_name
|
||||
ver = version
|
||||
if ver is None:
|
||||
try:
|
||||
import pkg_resources
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
for dist in pkg_resources.working_set:
|
||||
scripts = dist.get_entry_map().get('console_scripts') or {}
|
||||
for script_name, entry_point in iteritems(scripts):
|
||||
if entry_point.module_name == module:
|
||||
ver = dist.version
|
||||
break
|
||||
if ver is None:
|
||||
raise RuntimeError('Could not determine version')
|
||||
echo(message % {
|
||||
'prog': prog,
|
||||
'version': ver,
|
||||
}, color=ctx.color)
|
||||
ctx.exit()
|
||||
|
||||
attrs.setdefault('is_flag', True)
|
||||
attrs.setdefault('expose_value', False)
|
||||
attrs.setdefault('is_eager', True)
|
||||
attrs.setdefault('help', 'Show the version and exit.')
|
||||
attrs['callback'] = callback
|
||||
return option(*(param_decls or ('--version',)), **attrs)(f)
|
||||
return decorator
|
||||
|
||||
|
||||
def help_option(*param_decls, **attrs):
|
||||
"""Adds a ``--help`` option which immediately ends the program
|
||||
printing out the help page. This is usually unnecessary to add as
|
||||
this is added by default to all commands unless suppressed.
|
||||
|
||||
Like :func:`version_option`, this is implemented as eager option that
|
||||
prints in the callback and exits.
|
||||
|
||||
All arguments are forwarded to :func:`option`.
|
||||
"""
|
||||
def decorator(f):
|
||||
def callback(ctx, param, value):
|
||||
if value and not ctx.resilient_parsing:
|
||||
echo(ctx.get_help(), color=ctx.color)
|
||||
ctx.exit()
|
||||
attrs.setdefault('is_flag', True)
|
||||
attrs.setdefault('expose_value', False)
|
||||
attrs.setdefault('help', 'Show this message and exit.')
|
||||
attrs.setdefault('is_eager', True)
|
||||
attrs['callback'] = callback
|
||||
return option(*(param_decls or ('--help',)), **attrs)(f)
|
||||
return decorator
|
||||
|
||||
|
||||
# Circular dependencies between core and decorators
|
||||
from .core import Command, Group, Argument, Option
|
||||
@@ -1,235 +0,0 @@
|
||||
from ._compat import PY2, filename_to_ui, get_text_stderr
|
||||
from .utils import echo
|
||||
|
||||
|
||||
def _join_param_hints(param_hint):
|
||||
if isinstance(param_hint, (tuple, list)):
|
||||
return ' / '.join('"%s"' % x for x in param_hint)
|
||||
return param_hint
|
||||
|
||||
|
||||
class ClickException(Exception):
|
||||
"""An exception that Click can handle and show to the user."""
|
||||
|
||||
#: The exit code for this exception
|
||||
exit_code = 1
|
||||
|
||||
def __init__(self, message):
|
||||
ctor_msg = message
|
||||
if PY2:
|
||||
if ctor_msg is not None:
|
||||
ctor_msg = ctor_msg.encode('utf-8')
|
||||
Exception.__init__(self, ctor_msg)
|
||||
self.message = message
|
||||
|
||||
def format_message(self):
|
||||
return self.message
|
||||
|
||||
def __str__(self):
|
||||
return self.message
|
||||
|
||||
if PY2:
|
||||
__unicode__ = __str__
|
||||
|
||||
def __str__(self):
|
||||
return self.message.encode('utf-8')
|
||||
|
||||
def show(self, file=None):
|
||||
if file is None:
|
||||
file = get_text_stderr()
|
||||
echo('Error: %s' % self.format_message(), file=file)
|
||||
|
||||
|
||||
class UsageError(ClickException):
|
||||
"""An internal exception that signals a usage error. This typically
|
||||
aborts any further handling.
|
||||
|
||||
:param message: the error message to display.
|
||||
:param ctx: optionally the context that caused this error. Click will
|
||||
fill in the context automatically in some situations.
|
||||
"""
|
||||
exit_code = 2
|
||||
|
||||
def __init__(self, message, ctx=None):
|
||||
ClickException.__init__(self, message)
|
||||
self.ctx = ctx
|
||||
self.cmd = self.ctx and self.ctx.command or None
|
||||
|
||||
def show(self, file=None):
|
||||
if file is None:
|
||||
file = get_text_stderr()
|
||||
color = None
|
||||
hint = ''
|
||||
if (self.cmd is not None and
|
||||
self.cmd.get_help_option(self.ctx) is not None):
|
||||
hint = ('Try "%s %s" for help.\n'
|
||||
% (self.ctx.command_path, self.ctx.help_option_names[0]))
|
||||
if self.ctx is not None:
|
||||
color = self.ctx.color
|
||||
echo(self.ctx.get_usage() + '\n%s' % hint, file=file, color=color)
|
||||
echo('Error: %s' % self.format_message(), file=file, color=color)
|
||||
|
||||
|
||||
class BadParameter(UsageError):
|
||||
"""An exception that formats out a standardized error message for a
|
||||
bad parameter. This is useful when thrown from a callback or type as
|
||||
Click will attach contextual information to it (for instance, which
|
||||
parameter it is).
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param param: the parameter object that caused this error. This can
|
||||
be left out, and Click will attach this info itself
|
||||
if possible.
|
||||
:param param_hint: a string that shows up as parameter name. This
|
||||
can be used as alternative to `param` in cases
|
||||
where custom validation should happen. If it is
|
||||
a string it's used as such, if it's a list then
|
||||
each item is quoted and separated.
|
||||
"""
|
||||
|
||||
def __init__(self, message, ctx=None, param=None,
|
||||
param_hint=None):
|
||||
UsageError.__init__(self, message, ctx)
|
||||
self.param = param
|
||||
self.param_hint = param_hint
|
||||
|
||||
def format_message(self):
|
||||
if self.param_hint is not None:
|
||||
param_hint = self.param_hint
|
||||
elif self.param is not None:
|
||||
param_hint = self.param.get_error_hint(self.ctx)
|
||||
else:
|
||||
return 'Invalid value: %s' % self.message
|
||||
param_hint = _join_param_hints(param_hint)
|
||||
|
||||
return 'Invalid value for %s: %s' % (param_hint, self.message)
|
||||
|
||||
|
||||
class MissingParameter(BadParameter):
|
||||
"""Raised if click required an option or argument but it was not
|
||||
provided when invoking the script.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
:param param_type: a string that indicates the type of the parameter.
|
||||
The default is to inherit the parameter type from
|
||||
the given `param`. Valid values are ``'parameter'``,
|
||||
``'option'`` or ``'argument'``.
|
||||
"""
|
||||
|
||||
def __init__(self, message=None, ctx=None, param=None,
|
||||
param_hint=None, param_type=None):
|
||||
BadParameter.__init__(self, message, ctx, param, param_hint)
|
||||
self.param_type = param_type
|
||||
|
||||
def format_message(self):
|
||||
if self.param_hint is not None:
|
||||
param_hint = self.param_hint
|
||||
elif self.param is not None:
|
||||
param_hint = self.param.get_error_hint(self.ctx)
|
||||
else:
|
||||
param_hint = None
|
||||
param_hint = _join_param_hints(param_hint)
|
||||
|
||||
param_type = self.param_type
|
||||
if param_type is None and self.param is not None:
|
||||
param_type = self.param.param_type_name
|
||||
|
||||
msg = self.message
|
||||
if self.param is not None:
|
||||
msg_extra = self.param.type.get_missing_message(self.param)
|
||||
if msg_extra:
|
||||
if msg:
|
||||
msg += '. ' + msg_extra
|
||||
else:
|
||||
msg = msg_extra
|
||||
|
||||
return 'Missing %s%s%s%s' % (
|
||||
param_type,
|
||||
param_hint and ' %s' % param_hint or '',
|
||||
msg and '. ' or '.',
|
||||
msg or '',
|
||||
)
|
||||
|
||||
|
||||
class NoSuchOption(UsageError):
|
||||
"""Raised if click attempted to handle an option that does not
|
||||
exist.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
"""
|
||||
|
||||
def __init__(self, option_name, message=None, possibilities=None,
|
||||
ctx=None):
|
||||
if message is None:
|
||||
message = 'no such option: %s' % option_name
|
||||
UsageError.__init__(self, message, ctx)
|
||||
self.option_name = option_name
|
||||
self.possibilities = possibilities
|
||||
|
||||
def format_message(self):
|
||||
bits = [self.message]
|
||||
if self.possibilities:
|
||||
if len(self.possibilities) == 1:
|
||||
bits.append('Did you mean %s?' % self.possibilities[0])
|
||||
else:
|
||||
possibilities = sorted(self.possibilities)
|
||||
bits.append('(Possible options: %s)' % ', '.join(possibilities))
|
||||
return ' '.join(bits)
|
||||
|
||||
|
||||
class BadOptionUsage(UsageError):
|
||||
"""Raised if an option is generally supplied but the use of the option
|
||||
was incorrect. This is for instance raised if the number of arguments
|
||||
for an option is not correct.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
|
||||
:param option_name: the name of the option being used incorrectly.
|
||||
"""
|
||||
|
||||
def __init__(self, option_name, message, ctx=None):
|
||||
UsageError.__init__(self, message, ctx)
|
||||
self.option_name = option_name
|
||||
|
||||
|
||||
class BadArgumentUsage(UsageError):
|
||||
"""Raised if an argument is generally supplied but the use of the argument
|
||||
was incorrect. This is for instance raised if the number of values
|
||||
for an argument is not correct.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
"""
|
||||
|
||||
def __init__(self, message, ctx=None):
|
||||
UsageError.__init__(self, message, ctx)
|
||||
|
||||
|
||||
class FileError(ClickException):
|
||||
"""Raised if a file cannot be opened."""
|
||||
|
||||
def __init__(self, filename, hint=None):
|
||||
ui_filename = filename_to_ui(filename)
|
||||
if hint is None:
|
||||
hint = 'unknown error'
|
||||
ClickException.__init__(self, hint)
|
||||
self.ui_filename = ui_filename
|
||||
self.filename = filename
|
||||
|
||||
def format_message(self):
|
||||
return 'Could not open file %s: %s' % (self.ui_filename, self.message)
|
||||
|
||||
|
||||
class Abort(RuntimeError):
|
||||
"""An internal signalling exception that signals Click to abort."""
|
||||
|
||||
|
||||
class Exit(RuntimeError):
|
||||
"""An exception that indicates that the application should exit with some
|
||||
status code.
|
||||
|
||||
:param code: the status code to exit with.
|
||||
"""
|
||||
def __init__(self, code=0):
|
||||
self.exit_code = code
|
||||
@@ -1,256 +0,0 @@
|
||||
from contextlib import contextmanager
|
||||
from .termui import get_terminal_size
|
||||
from .parser import split_opt
|
||||
from ._compat import term_len
|
||||
|
||||
|
||||
# Can force a width. This is used by the test system
|
||||
FORCED_WIDTH = None
|
||||
|
||||
|
||||
def measure_table(rows):
|
||||
widths = {}
|
||||
for row in rows:
|
||||
for idx, col in enumerate(row):
|
||||
widths[idx] = max(widths.get(idx, 0), term_len(col))
|
||||
return tuple(y for x, y in sorted(widths.items()))
|
||||
|
||||
|
||||
def iter_rows(rows, col_count):
|
||||
for row in rows:
|
||||
row = tuple(row)
|
||||
yield row + ('',) * (col_count - len(row))
|
||||
|
||||
|
||||
def wrap_text(text, width=78, initial_indent='', subsequent_indent='',
|
||||
preserve_paragraphs=False):
|
||||
"""A helper function that intelligently wraps text. By default, it
|
||||
assumes that it operates on a single paragraph of text but if the
|
||||
`preserve_paragraphs` parameter is provided it will intelligently
|
||||
handle paragraphs (defined by two empty lines).
|
||||
|
||||
If paragraphs are handled, a paragraph can be prefixed with an empty
|
||||
line containing the ``\\b`` character (``\\x08``) to indicate that
|
||||
no rewrapping should happen in that block.
|
||||
|
||||
:param text: the text that should be rewrapped.
|
||||
:param width: the maximum width for the text.
|
||||
:param initial_indent: the initial indent that should be placed on the
|
||||
first line as a string.
|
||||
:param subsequent_indent: the indent string that should be placed on
|
||||
each consecutive line.
|
||||
:param preserve_paragraphs: if this flag is set then the wrapping will
|
||||
intelligently handle paragraphs.
|
||||
"""
|
||||
from ._textwrap import TextWrapper
|
||||
text = text.expandtabs()
|
||||
wrapper = TextWrapper(width, initial_indent=initial_indent,
|
||||
subsequent_indent=subsequent_indent,
|
||||
replace_whitespace=False)
|
||||
if not preserve_paragraphs:
|
||||
return wrapper.fill(text)
|
||||
|
||||
p = []
|
||||
buf = []
|
||||
indent = None
|
||||
|
||||
def _flush_par():
|
||||
if not buf:
|
||||
return
|
||||
if buf[0].strip() == '\b':
|
||||
p.append((indent or 0, True, '\n'.join(buf[1:])))
|
||||
else:
|
||||
p.append((indent or 0, False, ' '.join(buf)))
|
||||
del buf[:]
|
||||
|
||||
for line in text.splitlines():
|
||||
if not line:
|
||||
_flush_par()
|
||||
indent = None
|
||||
else:
|
||||
if indent is None:
|
||||
orig_len = term_len(line)
|
||||
line = line.lstrip()
|
||||
indent = orig_len - term_len(line)
|
||||
buf.append(line)
|
||||
_flush_par()
|
||||
|
||||
rv = []
|
||||
for indent, raw, text in p:
|
||||
with wrapper.extra_indent(' ' * indent):
|
||||
if raw:
|
||||
rv.append(wrapper.indent_only(text))
|
||||
else:
|
||||
rv.append(wrapper.fill(text))
|
||||
|
||||
return '\n\n'.join(rv)
|
||||
|
||||
|
||||
class HelpFormatter(object):
|
||||
"""This class helps with formatting text-based help pages. It's
|
||||
usually just needed for very special internal cases, but it's also
|
||||
exposed so that developers can write their own fancy outputs.
|
||||
|
||||
At present, it always writes into memory.
|
||||
|
||||
:param indent_increment: the additional increment for each level.
|
||||
:param width: the width for the text. This defaults to the terminal
|
||||
width clamped to a maximum of 78.
|
||||
"""
|
||||
|
||||
def __init__(self, indent_increment=2, width=None, max_width=None):
|
||||
self.indent_increment = indent_increment
|
||||
if max_width is None:
|
||||
max_width = 80
|
||||
if width is None:
|
||||
width = FORCED_WIDTH
|
||||
if width is None:
|
||||
width = max(min(get_terminal_size()[0], max_width) - 2, 50)
|
||||
self.width = width
|
||||
self.current_indent = 0
|
||||
self.buffer = []
|
||||
|
||||
def write(self, string):
|
||||
"""Writes a unicode string into the internal buffer."""
|
||||
self.buffer.append(string)
|
||||
|
||||
def indent(self):
|
||||
"""Increases the indentation."""
|
||||
self.current_indent += self.indent_increment
|
||||
|
||||
def dedent(self):
|
||||
"""Decreases the indentation."""
|
||||
self.current_indent -= self.indent_increment
|
||||
|
||||
def write_usage(self, prog, args='', prefix='Usage: '):
|
||||
"""Writes a usage line into the buffer.
|
||||
|
||||
:param prog: the program name.
|
||||
:param args: whitespace separated list of arguments.
|
||||
:param prefix: the prefix for the first line.
|
||||
"""
|
||||
usage_prefix = '%*s%s ' % (self.current_indent, prefix, prog)
|
||||
text_width = self.width - self.current_indent
|
||||
|
||||
if text_width >= (term_len(usage_prefix) + 20):
|
||||
# The arguments will fit to the right of the prefix.
|
||||
indent = ' ' * term_len(usage_prefix)
|
||||
self.write(wrap_text(args, text_width,
|
||||
initial_indent=usage_prefix,
|
||||
subsequent_indent=indent))
|
||||
else:
|
||||
# The prefix is too long, put the arguments on the next line.
|
||||
self.write(usage_prefix)
|
||||
self.write('\n')
|
||||
indent = ' ' * (max(self.current_indent, term_len(prefix)) + 4)
|
||||
self.write(wrap_text(args, text_width,
|
||||
initial_indent=indent,
|
||||
subsequent_indent=indent))
|
||||
|
||||
self.write('\n')
|
||||
|
||||
def write_heading(self, heading):
|
||||
"""Writes a heading into the buffer."""
|
||||
self.write('%*s%s:\n' % (self.current_indent, '', heading))
|
||||
|
||||
def write_paragraph(self):
|
||||
"""Writes a paragraph into the buffer."""
|
||||
if self.buffer:
|
||||
self.write('\n')
|
||||
|
||||
def write_text(self, text):
|
||||
"""Writes re-indented text into the buffer. This rewraps and
|
||||
preserves paragraphs.
|
||||
"""
|
||||
text_width = max(self.width - self.current_indent, 11)
|
||||
indent = ' ' * self.current_indent
|
||||
self.write(wrap_text(text, text_width,
|
||||
initial_indent=indent,
|
||||
subsequent_indent=indent,
|
||||
preserve_paragraphs=True))
|
||||
self.write('\n')
|
||||
|
||||
def write_dl(self, rows, col_max=30, col_spacing=2):
|
||||
"""Writes a definition list into the buffer. This is how options
|
||||
and commands are usually formatted.
|
||||
|
||||
:param rows: a list of two item tuples for the terms and values.
|
||||
:param col_max: the maximum width of the first column.
|
||||
:param col_spacing: the number of spaces between the first and
|
||||
second column.
|
||||
"""
|
||||
rows = list(rows)
|
||||
widths = measure_table(rows)
|
||||
if len(widths) != 2:
|
||||
raise TypeError('Expected two columns for definition list')
|
||||
|
||||
first_col = min(widths[0], col_max) + col_spacing
|
||||
|
||||
for first, second in iter_rows(rows, len(widths)):
|
||||
self.write('%*s%s' % (self.current_indent, '', first))
|
||||
if not second:
|
||||
self.write('\n')
|
||||
continue
|
||||
if term_len(first) <= first_col - col_spacing:
|
||||
self.write(' ' * (first_col - term_len(first)))
|
||||
else:
|
||||
self.write('\n')
|
||||
self.write(' ' * (first_col + self.current_indent))
|
||||
|
||||
text_width = max(self.width - first_col - 2, 10)
|
||||
lines = iter(wrap_text(second, text_width).splitlines())
|
||||
if lines:
|
||||
self.write(next(lines) + '\n')
|
||||
for line in lines:
|
||||
self.write('%*s%s\n' % (
|
||||
first_col + self.current_indent, '', line))
|
||||
else:
|
||||
self.write('\n')
|
||||
|
||||
@contextmanager
|
||||
def section(self, name):
|
||||
"""Helpful context manager that writes a paragraph, a heading,
|
||||
and the indents.
|
||||
|
||||
:param name: the section name that is written as heading.
|
||||
"""
|
||||
self.write_paragraph()
|
||||
self.write_heading(name)
|
||||
self.indent()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.dedent()
|
||||
|
||||
@contextmanager
|
||||
def indentation(self):
|
||||
"""A context manager that increases the indentation."""
|
||||
self.indent()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self.dedent()
|
||||
|
||||
def getvalue(self):
|
||||
"""Returns the buffer contents."""
|
||||
return ''.join(self.buffer)
|
||||
|
||||
|
||||
def join_options(options):
|
||||
"""Given a list of option strings this joins them in the most appropriate
|
||||
way and returns them in the form ``(formatted_string,
|
||||
any_prefix_is_slash)`` where the second item in the tuple is a flag that
|
||||
indicates if any of the option prefixes was a slash.
|
||||
"""
|
||||
rv = []
|
||||
any_prefix_is_slash = False
|
||||
for opt in options:
|
||||
prefix = split_opt(opt)[0]
|
||||
if prefix == '/':
|
||||
any_prefix_is_slash = True
|
||||
rv.append((len(prefix), opt))
|
||||
|
||||
rv.sort(key=lambda x: x[0])
|
||||
|
||||
rv = ', '.join(x[1] for x in rv)
|
||||
return rv, any_prefix_is_slash
|
||||
@@ -1,48 +0,0 @@
|
||||
from threading import local
|
||||
|
||||
|
||||
_local = local()
|
||||
|
||||
|
||||
def get_current_context(silent=False):
|
||||
"""Returns the current click context. This can be used as a way to
|
||||
access the current context object from anywhere. This is a more implicit
|
||||
alternative to the :func:`pass_context` decorator. This function is
|
||||
primarily useful for helpers such as :func:`echo` which might be
|
||||
interested in changing its behavior based on the current context.
|
||||
|
||||
To push the current context, :meth:`Context.scope` can be used.
|
||||
|
||||
.. versionadded:: 5.0
|
||||
|
||||
:param silent: is set to `True` the return value is `None` if no context
|
||||
is available. The default behavior is to raise a
|
||||
:exc:`RuntimeError`.
|
||||
"""
|
||||
try:
|
||||
return getattr(_local, 'stack')[-1]
|
||||
except (AttributeError, IndexError):
|
||||
if not silent:
|
||||
raise RuntimeError('There is no active click context.')
|
||||
|
||||
|
||||
def push_context(ctx):
|
||||
"""Pushes a new context to the current stack."""
|
||||
_local.__dict__.setdefault('stack', []).append(ctx)
|
||||
|
||||
|
||||
def pop_context():
|
||||
"""Removes the top level from the stack."""
|
||||
_local.stack.pop()
|
||||
|
||||
|
||||
def resolve_color_default(color=None):
|
||||
""""Internal helper to get the default value of the color flag. If a
|
||||
value is passed it's returned unchanged, otherwise it's looked up from
|
||||
the current context.
|
||||
"""
|
||||
if color is not None:
|
||||
return color
|
||||
ctx = get_current_context(silent=True)
|
||||
if ctx is not None:
|
||||
return ctx.color
|
||||
@@ -1,427 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
click.parser
|
||||
~~~~~~~~~~~~
|
||||
|
||||
This module started out as largely a copy paste from the stdlib's
|
||||
optparse module with the features removed that we do not need from
|
||||
optparse because we implement them in Click on a higher level (for
|
||||
instance type handling, help formatting and a lot more).
|
||||
|
||||
The plan is to remove more and more from here over time.
|
||||
|
||||
The reason this is a different module and not optparse from the stdlib
|
||||
is that there are differences in 2.x and 3.x about the error messages
|
||||
generated and optparse in the stdlib uses gettext for no good reason
|
||||
and might cause us issues.
|
||||
"""
|
||||
|
||||
import re
|
||||
from collections import deque
|
||||
from .exceptions import UsageError, NoSuchOption, BadOptionUsage, \
|
||||
BadArgumentUsage
|
||||
|
||||
|
||||
def _unpack_args(args, nargs_spec):
|
||||
"""Given an iterable of arguments and an iterable of nargs specifications,
|
||||
it returns a tuple with all the unpacked arguments at the first index
|
||||
and all remaining arguments as the second.
|
||||
|
||||
The nargs specification is the number of arguments that should be consumed
|
||||
or `-1` to indicate that this position should eat up all the remainders.
|
||||
|
||||
Missing items are filled with `None`.
|
||||
"""
|
||||
args = deque(args)
|
||||
nargs_spec = deque(nargs_spec)
|
||||
rv = []
|
||||
spos = None
|
||||
|
||||
def _fetch(c):
|
||||
try:
|
||||
if spos is None:
|
||||
return c.popleft()
|
||||
else:
|
||||
return c.pop()
|
||||
except IndexError:
|
||||
return None
|
||||
|
||||
while nargs_spec:
|
||||
nargs = _fetch(nargs_spec)
|
||||
if nargs == 1:
|
||||
rv.append(_fetch(args))
|
||||
elif nargs > 1:
|
||||
x = [_fetch(args) for _ in range(nargs)]
|
||||
# If we're reversed, we're pulling in the arguments in reverse,
|
||||
# so we need to turn them around.
|
||||
if spos is not None:
|
||||
x.reverse()
|
||||
rv.append(tuple(x))
|
||||
elif nargs < 0:
|
||||
if spos is not None:
|
||||
raise TypeError('Cannot have two nargs < 0')
|
||||
spos = len(rv)
|
||||
rv.append(None)
|
||||
|
||||
# spos is the position of the wildcard (star). If it's not `None`,
|
||||
# we fill it with the remainder.
|
||||
if spos is not None:
|
||||
rv[spos] = tuple(args)
|
||||
args = []
|
||||
rv[spos + 1:] = reversed(rv[spos + 1:])
|
||||
|
||||
return tuple(rv), list(args)
|
||||
|
||||
|
||||
def _error_opt_args(nargs, opt):
|
||||
if nargs == 1:
|
||||
raise BadOptionUsage(opt, '%s option requires an argument' % opt)
|
||||
raise BadOptionUsage(opt, '%s option requires %d arguments' % (opt, nargs))
|
||||
|
||||
|
||||
def split_opt(opt):
|
||||
first = opt[:1]
|
||||
if first.isalnum():
|
||||
return '', opt
|
||||
if opt[1:2] == first:
|
||||
return opt[:2], opt[2:]
|
||||
return first, opt[1:]
|
||||
|
||||
|
||||
def normalize_opt(opt, ctx):
|
||||
if ctx is None or ctx.token_normalize_func is None:
|
||||
return opt
|
||||
prefix, opt = split_opt(opt)
|
||||
return prefix + ctx.token_normalize_func(opt)
|
||||
|
||||
|
||||
def split_arg_string(string):
|
||||
"""Given an argument string this attempts to split it into small parts."""
|
||||
rv = []
|
||||
for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
|
||||
r'|"([^"\\]*(?:\\.[^"\\]*)*)"'
|
||||
r'|\S+)\s*', string, re.S):
|
||||
arg = match.group().strip()
|
||||
if arg[:1] == arg[-1:] and arg[:1] in '"\'':
|
||||
arg = arg[1:-1].encode('ascii', 'backslashreplace') \
|
||||
.decode('unicode-escape')
|
||||
try:
|
||||
arg = type(string)(arg)
|
||||
except UnicodeError:
|
||||
pass
|
||||
rv.append(arg)
|
||||
return rv
|
||||
|
||||
|
||||
class Option(object):
|
||||
|
||||
def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None):
|
||||
self._short_opts = []
|
||||
self._long_opts = []
|
||||
self.prefixes = set()
|
||||
|
||||
for opt in opts:
|
||||
prefix, value = split_opt(opt)
|
||||
if not prefix:
|
||||
raise ValueError('Invalid start character for option (%s)'
|
||||
% opt)
|
||||
self.prefixes.add(prefix[0])
|
||||
if len(prefix) == 1 and len(value) == 1:
|
||||
self._short_opts.append(opt)
|
||||
else:
|
||||
self._long_opts.append(opt)
|
||||
self.prefixes.add(prefix)
|
||||
|
||||
if action is None:
|
||||
action = 'store'
|
||||
|
||||
self.dest = dest
|
||||
self.action = action
|
||||
self.nargs = nargs
|
||||
self.const = const
|
||||
self.obj = obj
|
||||
|
||||
@property
|
||||
def takes_value(self):
|
||||
return self.action in ('store', 'append')
|
||||
|
||||
def process(self, value, state):
|
||||
if self.action == 'store':
|
||||
state.opts[self.dest] = value
|
||||
elif self.action == 'store_const':
|
||||
state.opts[self.dest] = self.const
|
||||
elif self.action == 'append':
|
||||
state.opts.setdefault(self.dest, []).append(value)
|
||||
elif self.action == 'append_const':
|
||||
state.opts.setdefault(self.dest, []).append(self.const)
|
||||
elif self.action == 'count':
|
||||
state.opts[self.dest] = state.opts.get(self.dest, 0) + 1
|
||||
else:
|
||||
raise ValueError('unknown action %r' % self.action)
|
||||
state.order.append(self.obj)
|
||||
|
||||
|
||||
class Argument(object):
|
||||
|
||||
def __init__(self, dest, nargs=1, obj=None):
|
||||
self.dest = dest
|
||||
self.nargs = nargs
|
||||
self.obj = obj
|
||||
|
||||
def process(self, value, state):
|
||||
if self.nargs > 1:
|
||||
holes = sum(1 for x in value if x is None)
|
||||
if holes == len(value):
|
||||
value = None
|
||||
elif holes != 0:
|
||||
raise BadArgumentUsage('argument %s takes %d values'
|
||||
% (self.dest, self.nargs))
|
||||
state.opts[self.dest] = value
|
||||
state.order.append(self.obj)
|
||||
|
||||
|
||||
class ParsingState(object):
|
||||
|
||||
def __init__(self, rargs):
|
||||
self.opts = {}
|
||||
self.largs = []
|
||||
self.rargs = rargs
|
||||
self.order = []
|
||||
|
||||
|
||||
class OptionParser(object):
|
||||
"""The option parser is an internal class that is ultimately used to
|
||||
parse options and arguments. It's modelled after optparse and brings
|
||||
a similar but vastly simplified API. It should generally not be used
|
||||
directly as the high level Click classes wrap it for you.
|
||||
|
||||
It's not nearly as extensible as optparse or argparse as it does not
|
||||
implement features that are implemented on a higher level (such as
|
||||
types or defaults).
|
||||
|
||||
:param ctx: optionally the :class:`~click.Context` where this parser
|
||||
should go with.
|
||||
"""
|
||||
|
||||
def __init__(self, ctx=None):
|
||||
#: The :class:`~click.Context` for this parser. This might be
|
||||
#: `None` for some advanced use cases.
|
||||
self.ctx = ctx
|
||||
#: This controls how the parser deals with interspersed arguments.
|
||||
#: If this is set to `False`, the parser will stop on the first
|
||||
#: non-option. Click uses this to implement nested subcommands
|
||||
#: safely.
|
||||
self.allow_interspersed_args = True
|
||||
#: This tells the parser how to deal with unknown options. By
|
||||
#: default it will error out (which is sensible), but there is a
|
||||
#: second mode where it will ignore it and continue processing
|
||||
#: after shifting all the unknown options into the resulting args.
|
||||
self.ignore_unknown_options = False
|
||||
if ctx is not None:
|
||||
self.allow_interspersed_args = ctx.allow_interspersed_args
|
||||
self.ignore_unknown_options = ctx.ignore_unknown_options
|
||||
self._short_opt = {}
|
||||
self._long_opt = {}
|
||||
self._opt_prefixes = set(['-', '--'])
|
||||
self._args = []
|
||||
|
||||
def add_option(self, opts, dest, action=None, nargs=1, const=None,
|
||||
obj=None):
|
||||
"""Adds a new option named `dest` to the parser. The destination
|
||||
is not inferred (unlike with optparse) and needs to be explicitly
|
||||
provided. Action can be any of ``store``, ``store_const``,
|
||||
``append``, ``appnd_const`` or ``count``.
|
||||
|
||||
The `obj` can be used to identify the option in the order list
|
||||
that is returned from the parser.
|
||||
"""
|
||||
if obj is None:
|
||||
obj = dest
|
||||
opts = [normalize_opt(opt, self.ctx) for opt in opts]
|
||||
option = Option(opts, dest, action=action, nargs=nargs,
|
||||
const=const, obj=obj)
|
||||
self._opt_prefixes.update(option.prefixes)
|
||||
for opt in option._short_opts:
|
||||
self._short_opt[opt] = option
|
||||
for opt in option._long_opts:
|
||||
self._long_opt[opt] = option
|
||||
|
||||
def add_argument(self, dest, nargs=1, obj=None):
|
||||
"""Adds a positional argument named `dest` to the parser.
|
||||
|
||||
The `obj` can be used to identify the option in the order list
|
||||
that is returned from the parser.
|
||||
"""
|
||||
if obj is None:
|
||||
obj = dest
|
||||
self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))
|
||||
|
||||
def parse_args(self, args):
|
||||
"""Parses positional arguments and returns ``(values, args, order)``
|
||||
for the parsed options and arguments as well as the leftover
|
||||
arguments if there are any. The order is a list of objects as they
|
||||
appear on the command line. If arguments appear multiple times they
|
||||
will be memorized multiple times as well.
|
||||
"""
|
||||
state = ParsingState(args)
|
||||
try:
|
||||
self._process_args_for_options(state)
|
||||
self._process_args_for_args(state)
|
||||
except UsageError:
|
||||
if self.ctx is None or not self.ctx.resilient_parsing:
|
||||
raise
|
||||
return state.opts, state.largs, state.order
|
||||
|
||||
def _process_args_for_args(self, state):
|
||||
pargs, args = _unpack_args(state.largs + state.rargs,
|
||||
[x.nargs for x in self._args])
|
||||
|
||||
for idx, arg in enumerate(self._args):
|
||||
arg.process(pargs[idx], state)
|
||||
|
||||
state.largs = args
|
||||
state.rargs = []
|
||||
|
||||
def _process_args_for_options(self, state):
|
||||
while state.rargs:
|
||||
arg = state.rargs.pop(0)
|
||||
arglen = len(arg)
|
||||
# Double dashes always handled explicitly regardless of what
|
||||
# prefixes are valid.
|
||||
if arg == '--':
|
||||
return
|
||||
elif arg[:1] in self._opt_prefixes and arglen > 1:
|
||||
self._process_opts(arg, state)
|
||||
elif self.allow_interspersed_args:
|
||||
state.largs.append(arg)
|
||||
else:
|
||||
state.rargs.insert(0, arg)
|
||||
return
|
||||
|
||||
# Say this is the original argument list:
|
||||
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
|
||||
# ^
|
||||
# (we are about to process arg(i)).
|
||||
#
|
||||
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
|
||||
# [arg0, ..., arg(i-1)] (any options and their arguments will have
|
||||
# been removed from largs).
|
||||
#
|
||||
# The while loop will usually consume 1 or more arguments per pass.
|
||||
# If it consumes 1 (eg. arg is an option that takes no arguments),
|
||||
# then after _process_arg() is done the situation is:
|
||||
#
|
||||
# largs = subset of [arg0, ..., arg(i)]
|
||||
# rargs = [arg(i+1), ..., arg(N-1)]
|
||||
#
|
||||
# If allow_interspersed_args is false, largs will always be
|
||||
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
|
||||
# not a very interesting subset!
|
||||
|
||||
def _match_long_opt(self, opt, explicit_value, state):
|
||||
if opt not in self._long_opt:
|
||||
possibilities = [word for word in self._long_opt
|
||||
if word.startswith(opt)]
|
||||
raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
|
||||
|
||||
option = self._long_opt[opt]
|
||||
if option.takes_value:
|
||||
# At this point it's safe to modify rargs by injecting the
|
||||
# explicit value, because no exception is raised in this
|
||||
# branch. This means that the inserted value will be fully
|
||||
# consumed.
|
||||
if explicit_value is not None:
|
||||
state.rargs.insert(0, explicit_value)
|
||||
|
||||
nargs = option.nargs
|
||||
if len(state.rargs) < nargs:
|
||||
_error_opt_args(nargs, opt)
|
||||
elif nargs == 1:
|
||||
value = state.rargs.pop(0)
|
||||
else:
|
||||
value = tuple(state.rargs[:nargs])
|
||||
del state.rargs[:nargs]
|
||||
|
||||
elif explicit_value is not None:
|
||||
raise BadOptionUsage(opt, '%s option does not take a value' % opt)
|
||||
|
||||
else:
|
||||
value = None
|
||||
|
||||
option.process(value, state)
|
||||
|
||||
def _match_short_opt(self, arg, state):
|
||||
stop = False
|
||||
i = 1
|
||||
prefix = arg[0]
|
||||
unknown_options = []
|
||||
|
||||
for ch in arg[1:]:
|
||||
opt = normalize_opt(prefix + ch, self.ctx)
|
||||
option = self._short_opt.get(opt)
|
||||
i += 1
|
||||
|
||||
if not option:
|
||||
if self.ignore_unknown_options:
|
||||
unknown_options.append(ch)
|
||||
continue
|
||||
raise NoSuchOption(opt, ctx=self.ctx)
|
||||
if option.takes_value:
|
||||
# Any characters left in arg? Pretend they're the
|
||||
# next arg, and stop consuming characters of arg.
|
||||
if i < len(arg):
|
||||
state.rargs.insert(0, arg[i:])
|
||||
stop = True
|
||||
|
||||
nargs = option.nargs
|
||||
if len(state.rargs) < nargs:
|
||||
_error_opt_args(nargs, opt)
|
||||
elif nargs == 1:
|
||||
value = state.rargs.pop(0)
|
||||
else:
|
||||
value = tuple(state.rargs[:nargs])
|
||||
del state.rargs[:nargs]
|
||||
|
||||
else:
|
||||
value = None
|
||||
|
||||
option.process(value, state)
|
||||
|
||||
if stop:
|
||||
break
|
||||
|
||||
# If we got any unknown options we re-combinate the string of the
|
||||
# remaining options and re-attach the prefix, then report that
|
||||
# to the state as new larg. This way there is basic combinatorics
|
||||
# that can be achieved while still ignoring unknown arguments.
|
||||
if self.ignore_unknown_options and unknown_options:
|
||||
state.largs.append(prefix + ''.join(unknown_options))
|
||||
|
||||
def _process_opts(self, arg, state):
|
||||
explicit_value = None
|
||||
# Long option handling happens in two parts. The first part is
|
||||
# supporting explicitly attached values. In any case, we will try
|
||||
# to long match the option first.
|
||||
if '=' in arg:
|
||||
long_opt, explicit_value = arg.split('=', 1)
|
||||
else:
|
||||
long_opt = arg
|
||||
norm_long_opt = normalize_opt(long_opt, self.ctx)
|
||||
|
||||
# At this point we will match the (assumed) long option through
|
||||
# the long option matching code. Note that this allows options
|
||||
# like "-foo" to be matched as long options.
|
||||
try:
|
||||
self._match_long_opt(norm_long_opt, explicit_value, state)
|
||||
except NoSuchOption:
|
||||
# At this point the long option matching failed, and we need
|
||||
# to try with short options. However there is a special rule
|
||||
# which says, that if we have a two character options prefix
|
||||
# (applies to "--foo" for instance), we do not dispatch to the
|
||||
# short option code and will instead raise the no option
|
||||
# error.
|
||||
if arg[:2] not in self._opt_prefixes:
|
||||
return self._match_short_opt(arg, state)
|
||||
if not self.ignore_unknown_options:
|
||||
raise
|
||||
state.largs.append(arg)
|
||||
@@ -1,606 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
import struct
|
||||
import inspect
|
||||
import itertools
|
||||
|
||||
from ._compat import raw_input, text_type, string_types, \
|
||||
isatty, strip_ansi, get_winterm_size, DEFAULT_COLUMNS, WIN
|
||||
from .utils import echo
|
||||
from .exceptions import Abort, UsageError
|
||||
from .types import convert_type, Choice, Path
|
||||
from .globals import resolve_color_default
|
||||
|
||||
|
||||
# The prompt functions to use. The doc tools currently override these
|
||||
# functions to customize how they work.
|
||||
visible_prompt_func = raw_input
|
||||
|
||||
_ansi_colors = {
|
||||
'black': 30,
|
||||
'red': 31,
|
||||
'green': 32,
|
||||
'yellow': 33,
|
||||
'blue': 34,
|
||||
'magenta': 35,
|
||||
'cyan': 36,
|
||||
'white': 37,
|
||||
'reset': 39,
|
||||
'bright_black': 90,
|
||||
'bright_red': 91,
|
||||
'bright_green': 92,
|
||||
'bright_yellow': 93,
|
||||
'bright_blue': 94,
|
||||
'bright_magenta': 95,
|
||||
'bright_cyan': 96,
|
||||
'bright_white': 97,
|
||||
}
|
||||
_ansi_reset_all = '\033[0m'
|
||||
|
||||
|
||||
def hidden_prompt_func(prompt):
|
||||
import getpass
|
||||
return getpass.getpass(prompt)
|
||||
|
||||
|
||||
def _build_prompt(text, suffix, show_default=False, default=None, show_choices=True, type=None):
|
||||
prompt = text
|
||||
if type is not None and show_choices and isinstance(type, Choice):
|
||||
prompt += ' (' + ", ".join(map(str, type.choices)) + ')'
|
||||
if default is not None and show_default:
|
||||
prompt = '%s [%s]' % (prompt, default)
|
||||
return prompt + suffix
|
||||
|
||||
|
||||
def prompt(text, default=None, hide_input=False, confirmation_prompt=False,
|
||||
type=None, value_proc=None, prompt_suffix=': ', show_default=True,
|
||||
err=False, show_choices=True):
|
||||
"""Prompts a user for input. This is a convenience function that can
|
||||
be used to prompt a user for input later.
|
||||
|
||||
If the user aborts the input by sending a interrupt signal, this
|
||||
function will catch it and raise a :exc:`Abort` exception.
|
||||
|
||||
.. versionadded:: 7.0
|
||||
Added the show_choices parameter.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
Added unicode support for cmd.exe on Windows.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
:param text: the text to show for the prompt.
|
||||
:param default: the default value to use if no input happens. If this
|
||||
is not given it will prompt until it's aborted.
|
||||
:param hide_input: if this is set to true then the input value will
|
||||
be hidden.
|
||||
:param confirmation_prompt: asks for confirmation for the value.
|
||||
:param type: the type to use to check the value against.
|
||||
:param value_proc: if this parameter is provided it's a function that
|
||||
is invoked instead of the type conversion to
|
||||
convert a value.
|
||||
:param prompt_suffix: a suffix that should be added to the prompt.
|
||||
:param show_default: shows or hides the default value in the prompt.
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
:param show_choices: Show or hide choices if the passed type is a Choice.
|
||||
For example if type is a Choice of either day or week,
|
||||
show_choices is true and text is "Group by" then the
|
||||
prompt will be "Group by (day, week): ".
|
||||
"""
|
||||
result = None
|
||||
|
||||
def prompt_func(text):
|
||||
f = hide_input and hidden_prompt_func or visible_prompt_func
|
||||
try:
|
||||
# Write the prompt separately so that we get nice
|
||||
# coloring through colorama on Windows
|
||||
echo(text, nl=False, err=err)
|
||||
return f('')
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
# getpass doesn't print a newline if the user aborts input with ^C.
|
||||
# Allegedly this behavior is inherited from getpass(3).
|
||||
# A doc bug has been filed at https://bugs.python.org/issue24711
|
||||
if hide_input:
|
||||
echo(None, err=err)
|
||||
raise Abort()
|
||||
|
||||
if value_proc is None:
|
||||
value_proc = convert_type(type, default)
|
||||
|
||||
prompt = _build_prompt(text, prompt_suffix, show_default, default, show_choices, type)
|
||||
|
||||
while 1:
|
||||
while 1:
|
||||
value = prompt_func(prompt)
|
||||
if value:
|
||||
break
|
||||
elif default is not None:
|
||||
if isinstance(value_proc, Path):
|
||||
# validate Path default value(exists, dir_okay etc.)
|
||||
value = default
|
||||
break
|
||||
return default
|
||||
try:
|
||||
result = value_proc(value)
|
||||
except UsageError as e:
|
||||
echo('Error: %s' % e.message, err=err)
|
||||
continue
|
||||
if not confirmation_prompt:
|
||||
return result
|
||||
while 1:
|
||||
value2 = prompt_func('Repeat for confirmation: ')
|
||||
if value2:
|
||||
break
|
||||
if value == value2:
|
||||
return result
|
||||
echo('Error: the two entered values do not match', err=err)
|
||||
|
||||
|
||||
def confirm(text, default=False, abort=False, prompt_suffix=': ',
|
||||
show_default=True, err=False):
|
||||
"""Prompts for confirmation (yes/no question).
|
||||
|
||||
If the user aborts the input by sending a interrupt signal this
|
||||
function will catch it and raise a :exc:`Abort` exception.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
:param text: the question to ask.
|
||||
:param default: the default for the prompt.
|
||||
:param abort: if this is set to `True` a negative answer aborts the
|
||||
exception by raising :exc:`Abort`.
|
||||
:param prompt_suffix: a suffix that should be added to the prompt.
|
||||
:param show_default: shows or hides the default value in the prompt.
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
"""
|
||||
prompt = _build_prompt(text, prompt_suffix, show_default,
|
||||
default and 'Y/n' or 'y/N')
|
||||
while 1:
|
||||
try:
|
||||
# Write the prompt separately so that we get nice
|
||||
# coloring through colorama on Windows
|
||||
echo(prompt, nl=False, err=err)
|
||||
value = visible_prompt_func('').lower().strip()
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
raise Abort()
|
||||
if value in ('y', 'yes'):
|
||||
rv = True
|
||||
elif value in ('n', 'no'):
|
||||
rv = False
|
||||
elif value == '':
|
||||
rv = default
|
||||
else:
|
||||
echo('Error: invalid input', err=err)
|
||||
continue
|
||||
break
|
||||
if abort and not rv:
|
||||
raise Abort()
|
||||
return rv
|
||||
|
||||
|
||||
def get_terminal_size():
|
||||
"""Returns the current size of the terminal as tuple in the form
|
||||
``(width, height)`` in columns and rows.
|
||||
"""
|
||||
# If shutil has get_terminal_size() (Python 3.3 and later) use that
|
||||
if sys.version_info >= (3, 3):
|
||||
import shutil
|
||||
shutil_get_terminal_size = getattr(shutil, 'get_terminal_size', None)
|
||||
if shutil_get_terminal_size:
|
||||
sz = shutil_get_terminal_size()
|
||||
return sz.columns, sz.lines
|
||||
|
||||
# We provide a sensible default for get_winterm_size() when being invoked
|
||||
# inside a subprocess. Without this, it would not provide a useful input.
|
||||
if get_winterm_size is not None:
|
||||
size = get_winterm_size()
|
||||
if size == (0, 0):
|
||||
return (79, 24)
|
||||
else:
|
||||
return size
|
||||
|
||||
def ioctl_gwinsz(fd):
|
||||
try:
|
||||
import fcntl
|
||||
import termios
|
||||
cr = struct.unpack(
|
||||
'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
|
||||
except Exception:
|
||||
return
|
||||
return cr
|
||||
|
||||
cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
|
||||
if not cr:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
try:
|
||||
cr = ioctl_gwinsz(fd)
|
||||
finally:
|
||||
os.close(fd)
|
||||
except Exception:
|
||||
pass
|
||||
if not cr or not cr[0] or not cr[1]:
|
||||
cr = (os.environ.get('LINES', 25),
|
||||
os.environ.get('COLUMNS', DEFAULT_COLUMNS))
|
||||
return int(cr[1]), int(cr[0])
|
||||
|
||||
|
||||
def echo_via_pager(text_or_generator, color=None):
|
||||
"""This function takes a text and shows it via an environment specific
|
||||
pager on stdout.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
Added the `color` flag.
|
||||
|
||||
:param text_or_generator: the text to page, or alternatively, a
|
||||
generator emitting the text to page.
|
||||
:param color: controls if the pager supports ANSI colors or not. The
|
||||
default is autodetection.
|
||||
"""
|
||||
color = resolve_color_default(color)
|
||||
|
||||
if inspect.isgeneratorfunction(text_or_generator):
|
||||
i = text_or_generator()
|
||||
elif isinstance(text_or_generator, string_types):
|
||||
i = [text_or_generator]
|
||||
else:
|
||||
i = iter(text_or_generator)
|
||||
|
||||
# convert every element of i to a text type if necessary
|
||||
text_generator = (el if isinstance(el, string_types) else text_type(el)
|
||||
for el in i)
|
||||
|
||||
from ._termui_impl import pager
|
||||
return pager(itertools.chain(text_generator, "\n"), color)
|
||||
|
||||
|
||||
def progressbar(iterable=None, length=None, label=None, show_eta=True,
|
||||
show_percent=None, show_pos=False,
|
||||
item_show_func=None, fill_char='#', empty_char='-',
|
||||
bar_template='%(label)s [%(bar)s] %(info)s',
|
||||
info_sep=' ', width=36, file=None, color=None):
|
||||
"""This function creates an iterable context manager that can be used
|
||||
to iterate over something while showing a progress bar. It will
|
||||
either iterate over the `iterable` or `length` items (that are counted
|
||||
up). While iteration happens, this function will print a rendered
|
||||
progress bar to the given `file` (defaults to stdout) and will attempt
|
||||
to calculate remaining time and more. By default, this progress bar
|
||||
will not be rendered if the file is not a terminal.
|
||||
|
||||
The context manager creates the progress bar. When the context
|
||||
manager is entered the progress bar is already displayed. With every
|
||||
iteration over the progress bar, the iterable passed to the bar is
|
||||
advanced and the bar is updated. When the context manager exits,
|
||||
a newline is printed and the progress bar is finalized on screen.
|
||||
|
||||
No printing must happen or the progress bar will be unintentionally
|
||||
destroyed.
|
||||
|
||||
Example usage::
|
||||
|
||||
with progressbar(items) as bar:
|
||||
for item in bar:
|
||||
do_something_with(item)
|
||||
|
||||
Alternatively, if no iterable is specified, one can manually update the
|
||||
progress bar through the `update()` method instead of directly
|
||||
iterating over the progress bar. The update method accepts the number
|
||||
of steps to increment the bar with::
|
||||
|
||||
with progressbar(length=chunks.total_bytes) as bar:
|
||||
for chunk in chunks:
|
||||
process_chunk(chunk)
|
||||
bar.update(chunks.bytes)
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `color` parameter. Added a `update` method to the
|
||||
progressbar object.
|
||||
|
||||
:param iterable: an iterable to iterate over. If not provided the length
|
||||
is required.
|
||||
:param length: the number of items to iterate over. By default the
|
||||
progressbar will attempt to ask the iterator about its
|
||||
length, which might or might not work. If an iterable is
|
||||
also provided this parameter can be used to override the
|
||||
length. If an iterable is not provided the progress bar
|
||||
will iterate over a range of that length.
|
||||
:param label: the label to show next to the progress bar.
|
||||
:param show_eta: enables or disables the estimated time display. This is
|
||||
automatically disabled if the length cannot be
|
||||
determined.
|
||||
:param show_percent: enables or disables the percentage display. The
|
||||
default is `True` if the iterable has a length or
|
||||
`False` if not.
|
||||
:param show_pos: enables or disables the absolute position display. The
|
||||
default is `False`.
|
||||
:param item_show_func: a function called with the current item which
|
||||
can return a string to show the current item
|
||||
next to the progress bar. Note that the current
|
||||
item can be `None`!
|
||||
:param fill_char: the character to use to show the filled part of the
|
||||
progress bar.
|
||||
:param empty_char: the character to use to show the non-filled part of
|
||||
the progress bar.
|
||||
:param bar_template: the format string to use as template for the bar.
|
||||
The parameters in it are ``label`` for the label,
|
||||
``bar`` for the progress bar and ``info`` for the
|
||||
info section.
|
||||
:param info_sep: the separator between multiple info items (eta etc.)
|
||||
:param width: the width of the progress bar in characters, 0 means full
|
||||
terminal width
|
||||
:param file: the file to write to. If this is not a terminal then
|
||||
only the label is printed.
|
||||
:param color: controls if the terminal supports ANSI colors or not. The
|
||||
default is autodetection. This is only needed if ANSI
|
||||
codes are included anywhere in the progress bar output
|
||||
which is not the case by default.
|
||||
"""
|
||||
from ._termui_impl import ProgressBar
|
||||
color = resolve_color_default(color)
|
||||
return ProgressBar(iterable=iterable, length=length, show_eta=show_eta,
|
||||
show_percent=show_percent, show_pos=show_pos,
|
||||
item_show_func=item_show_func, fill_char=fill_char,
|
||||
empty_char=empty_char, bar_template=bar_template,
|
||||
info_sep=info_sep, file=file, label=label,
|
||||
width=width, color=color)
|
||||
|
||||
|
||||
def clear():
|
||||
"""Clears the terminal screen. This will have the effect of clearing
|
||||
the whole visible space of the terminal and moving the cursor to the
|
||||
top left. This does not do anything if not connected to a terminal.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if not isatty(sys.stdout):
|
||||
return
|
||||
# If we're on Windows and we don't have colorama available, then we
|
||||
# clear the screen by shelling out. Otherwise we can use an escape
|
||||
# sequence.
|
||||
if WIN:
|
||||
os.system('cls')
|
||||
else:
|
||||
sys.stdout.write('\033[2J\033[1;1H')
|
||||
|
||||
|
||||
def style(text, fg=None, bg=None, bold=None, dim=None, underline=None,
|
||||
blink=None, reverse=None, reset=True):
|
||||
"""Styles a text with ANSI styles and returns the new string. By
|
||||
default the styling is self contained which means that at the end
|
||||
of the string a reset code is issued. This can be prevented by
|
||||
passing ``reset=False``.
|
||||
|
||||
Examples::
|
||||
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
click.echo(click.style('ATTENTION!', blink=True))
|
||||
click.echo(click.style('Some things', reverse=True, fg='cyan'))
|
||||
|
||||
Supported color names:
|
||||
|
||||
* ``black`` (might be a gray)
|
||||
* ``red``
|
||||
* ``green``
|
||||
* ``yellow`` (might be an orange)
|
||||
* ``blue``
|
||||
* ``magenta``
|
||||
* ``cyan``
|
||||
* ``white`` (might be light gray)
|
||||
* ``bright_black``
|
||||
* ``bright_red``
|
||||
* ``bright_green``
|
||||
* ``bright_yellow``
|
||||
* ``bright_blue``
|
||||
* ``bright_magenta``
|
||||
* ``bright_cyan``
|
||||
* ``bright_white``
|
||||
* ``reset`` (reset the color code only)
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. versionadded:: 7.0
|
||||
Added support for bright colors.
|
||||
|
||||
:param text: the string to style with ansi codes.
|
||||
:param fg: if provided this will become the foreground color.
|
||||
:param bg: if provided this will become the background color.
|
||||
:param bold: if provided this will enable or disable bold mode.
|
||||
:param dim: if provided this will enable or disable dim mode. This is
|
||||
badly supported.
|
||||
:param underline: if provided this will enable or disable underline.
|
||||
:param blink: if provided this will enable or disable blinking.
|
||||
:param reverse: if provided this will enable or disable inverse
|
||||
rendering (foreground becomes background and the
|
||||
other way round).
|
||||
:param reset: by default a reset-all code is added at the end of the
|
||||
string which means that styles do not carry over. This
|
||||
can be disabled to compose styles.
|
||||
"""
|
||||
bits = []
|
||||
if fg:
|
||||
try:
|
||||
bits.append('\033[%dm' % (_ansi_colors[fg]))
|
||||
except KeyError:
|
||||
raise TypeError('Unknown color %r' % fg)
|
||||
if bg:
|
||||
try:
|
||||
bits.append('\033[%dm' % (_ansi_colors[bg] + 10))
|
||||
except KeyError:
|
||||
raise TypeError('Unknown color %r' % bg)
|
||||
if bold is not None:
|
||||
bits.append('\033[%dm' % (1 if bold else 22))
|
||||
if dim is not None:
|
||||
bits.append('\033[%dm' % (2 if dim else 22))
|
||||
if underline is not None:
|
||||
bits.append('\033[%dm' % (4 if underline else 24))
|
||||
if blink is not None:
|
||||
bits.append('\033[%dm' % (5 if blink else 25))
|
||||
if reverse is not None:
|
||||
bits.append('\033[%dm' % (7 if reverse else 27))
|
||||
bits.append(text)
|
||||
if reset:
|
||||
bits.append(_ansi_reset_all)
|
||||
return ''.join(bits)
|
||||
|
||||
|
||||
def unstyle(text):
|
||||
"""Removes ANSI styling information from a string. Usually it's not
|
||||
necessary to use this function as Click's echo function will
|
||||
automatically remove styling if necessary.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param text: the text to remove style information from.
|
||||
"""
|
||||
return strip_ansi(text)
|
||||
|
||||
|
||||
def secho(message=None, file=None, nl=True, err=False, color=None, **styles):
|
||||
"""This function combines :func:`echo` and :func:`style` into one
|
||||
call. As such the following two calls are the same::
|
||||
|
||||
click.secho('Hello World!', fg='green')
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
|
||||
All keyword arguments are forwarded to the underlying functions
|
||||
depending on which one they go with.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if message is not None:
|
||||
message = style(message, **styles)
|
||||
return echo(message, file=file, nl=nl, err=err, color=color)
|
||||
|
||||
|
||||
def edit(text=None, editor=None, env=None, require_save=True,
|
||||
extension='.txt', filename=None):
|
||||
r"""Edits the given text in the defined editor. If an editor is given
|
||||
(should be the full path to the executable but the regular operating
|
||||
system search path is used for finding the executable) it overrides
|
||||
the detected editor. Optionally, some environment variables can be
|
||||
used. If the editor is closed without changes, `None` is returned. In
|
||||
case a file is edited directly the return value is always `None` and
|
||||
`require_save` and `extension` are ignored.
|
||||
|
||||
If the editor cannot be opened a :exc:`UsageError` is raised.
|
||||
|
||||
Note for Windows: to simplify cross-platform usage, the newlines are
|
||||
automatically converted from POSIX to Windows and vice versa. As such,
|
||||
the message here will have ``\n`` as newline markers.
|
||||
|
||||
:param text: the text to edit.
|
||||
:param editor: optionally the editor to use. Defaults to automatic
|
||||
detection.
|
||||
:param env: environment variables to forward to the editor.
|
||||
:param require_save: if this is true, then not saving in the editor
|
||||
will make the return value become `None`.
|
||||
:param extension: the extension to tell the editor about. This defaults
|
||||
to `.txt` but changing this might change syntax
|
||||
highlighting.
|
||||
:param filename: if provided it will edit this file instead of the
|
||||
provided text contents. It will not use a temporary
|
||||
file as an indirection in that case.
|
||||
"""
|
||||
from ._termui_impl import Editor
|
||||
editor = Editor(editor=editor, env=env, require_save=require_save,
|
||||
extension=extension)
|
||||
if filename is None:
|
||||
return editor.edit(text)
|
||||
editor.edit_file(filename)
|
||||
|
||||
|
||||
def launch(url, wait=False, locate=False):
|
||||
"""This function launches the given URL (or filename) in the default
|
||||
viewer application for this file type. If this is an executable, it
|
||||
might launch the executable in a new session. The return value is
|
||||
the exit code of the launched application. Usually, ``0`` indicates
|
||||
success.
|
||||
|
||||
Examples::
|
||||
|
||||
click.launch('https://click.palletsprojects.com/')
|
||||
click.launch('/my/downloaded/file', locate=True)
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param url: URL or filename of the thing to launch.
|
||||
:param wait: waits for the program to stop.
|
||||
:param locate: if this is set to `True` then instead of launching the
|
||||
application associated with the URL it will attempt to
|
||||
launch a file manager with the file located. This
|
||||
might have weird effects if the URL does not point to
|
||||
the filesystem.
|
||||
"""
|
||||
from ._termui_impl import open_url
|
||||
return open_url(url, wait=wait, locate=locate)
|
||||
|
||||
|
||||
# If this is provided, getchar() calls into this instead. This is used
|
||||
# for unittesting purposes.
|
||||
_getchar = None
|
||||
|
||||
|
||||
def getchar(echo=False):
|
||||
"""Fetches a single character from the terminal and returns it. This
|
||||
will always return a unicode character and under certain rare
|
||||
circumstances this might return more than one character. The
|
||||
situations which more than one character is returned is when for
|
||||
whatever reason multiple characters end up in the terminal buffer or
|
||||
standard input was not actually a terminal.
|
||||
|
||||
Note that this will always read from the terminal, even if something
|
||||
is piped into the standard input.
|
||||
|
||||
Note for Windows: in rare cases when typing non-ASCII characters, this
|
||||
function might wait for a second character and then return both at once.
|
||||
This is because certain Unicode characters look like special-key markers.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param echo: if set to `True`, the character read will also show up on
|
||||
the terminal. The default is to not show it.
|
||||
"""
|
||||
f = _getchar
|
||||
if f is None:
|
||||
from ._termui_impl import getchar as f
|
||||
return f(echo)
|
||||
|
||||
|
||||
def raw_terminal():
|
||||
from ._termui_impl import raw_terminal as f
|
||||
return f()
|
||||
|
||||
|
||||
def pause(info='Press any key to continue ...', err=False):
|
||||
"""This command stops execution and waits for the user to press any
|
||||
key to continue. This is similar to the Windows batch "pause"
|
||||
command. If the program is not run through a terminal, this command
|
||||
will instead do nothing.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
.. versionadded:: 4.0
|
||||
Added the `err` parameter.
|
||||
|
||||
:param info: the info string to print before pausing.
|
||||
:param err: if set to message goes to ``stderr`` instead of
|
||||
``stdout``, the same as with echo.
|
||||
"""
|
||||
if not isatty(sys.stdin) or not isatty(sys.stdout):
|
||||
return
|
||||
try:
|
||||
if info:
|
||||
echo(info, nl=False, err=err)
|
||||
try:
|
||||
getchar()
|
||||
except (KeyboardInterrupt, EOFError):
|
||||
pass
|
||||
finally:
|
||||
if info:
|
||||
echo(err=err)
|
||||
@@ -1,374 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import tempfile
|
||||
import contextlib
|
||||
import shlex
|
||||
|
||||
from ._compat import iteritems, PY2, string_types
|
||||
|
||||
|
||||
# If someone wants to vendor click, we want to ensure the
|
||||
# correct package is discovered. Ideally we could use a
|
||||
# relative import here but unfortunately Python does not
|
||||
# support that.
|
||||
clickpkg = sys.modules[__name__.rsplit('.', 1)[0]]
|
||||
|
||||
|
||||
if PY2:
|
||||
from cStringIO import StringIO
|
||||
else:
|
||||
import io
|
||||
from ._compat import _find_binary_reader
|
||||
|
||||
|
||||
class EchoingStdin(object):
|
||||
|
||||
def __init__(self, input, output):
|
||||
self._input = input
|
||||
self._output = output
|
||||
|
||||
def __getattr__(self, x):
|
||||
return getattr(self._input, x)
|
||||
|
||||
def _echo(self, rv):
|
||||
self._output.write(rv)
|
||||
return rv
|
||||
|
||||
def read(self, n=-1):
|
||||
return self._echo(self._input.read(n))
|
||||
|
||||
def readline(self, n=-1):
|
||||
return self._echo(self._input.readline(n))
|
||||
|
||||
def readlines(self):
|
||||
return [self._echo(x) for x in self._input.readlines()]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._echo(x) for x in self._input)
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self._input)
|
||||
|
||||
|
||||
def make_input_stream(input, charset):
|
||||
# Is already an input stream.
|
||||
if hasattr(input, 'read'):
|
||||
if PY2:
|
||||
return input
|
||||
rv = _find_binary_reader(input)
|
||||
if rv is not None:
|
||||
return rv
|
||||
raise TypeError('Could not find binary reader for input stream.')
|
||||
|
||||
if input is None:
|
||||
input = b''
|
||||
elif not isinstance(input, bytes):
|
||||
input = input.encode(charset)
|
||||
if PY2:
|
||||
return StringIO(input)
|
||||
return io.BytesIO(input)
|
||||
|
||||
|
||||
class Result(object):
|
||||
"""Holds the captured result of an invoked CLI script."""
|
||||
|
||||
def __init__(self, runner, stdout_bytes, stderr_bytes, exit_code,
|
||||
exception, exc_info=None):
|
||||
#: The runner that created the result
|
||||
self.runner = runner
|
||||
#: The standard output as bytes.
|
||||
self.stdout_bytes = stdout_bytes
|
||||
#: The standard error as bytes, or False(y) if not available
|
||||
self.stderr_bytes = stderr_bytes
|
||||
#: The exit code as integer.
|
||||
self.exit_code = exit_code
|
||||
#: The exception that happened if one did.
|
||||
self.exception = exception
|
||||
#: The traceback
|
||||
self.exc_info = exc_info
|
||||
|
||||
@property
|
||||
def output(self):
|
||||
"""The (standard) output as unicode string."""
|
||||
return self.stdout
|
||||
|
||||
@property
|
||||
def stdout(self):
|
||||
"""The standard output as unicode string."""
|
||||
return self.stdout_bytes.decode(self.runner.charset, 'replace') \
|
||||
.replace('\r\n', '\n')
|
||||
|
||||
@property
|
||||
def stderr(self):
|
||||
"""The standard error as unicode string."""
|
||||
if not self.stderr_bytes:
|
||||
raise ValueError("stderr not separately captured")
|
||||
return self.stderr_bytes.decode(self.runner.charset, 'replace') \
|
||||
.replace('\r\n', '\n')
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s %s>' % (
|
||||
type(self).__name__,
|
||||
self.exception and repr(self.exception) or 'okay',
|
||||
)
|
||||
|
||||
|
||||
class CliRunner(object):
|
||||
"""The CLI runner provides functionality to invoke a Click command line
|
||||
script for unittesting purposes in a isolated environment. This only
|
||||
works in single-threaded systems without any concurrency as it changes the
|
||||
global interpreter state.
|
||||
|
||||
:param charset: the character set for the input and output data. This is
|
||||
UTF-8 by default and should not be changed currently as
|
||||
the reporting to Click only works in Python 2 properly.
|
||||
:param env: a dictionary with environment variables for overriding.
|
||||
:param echo_stdin: if this is set to `True`, then reading from stdin writes
|
||||
to stdout. This is useful for showing examples in
|
||||
some circumstances. Note that regular prompts
|
||||
will automatically echo the input.
|
||||
:param mix_stderr: if this is set to `False`, then stdout and stderr are
|
||||
preserved as independent streams. This is useful for
|
||||
Unix-philosophy apps that have predictable stdout and
|
||||
noisy stderr, such that each may be measured
|
||||
independently
|
||||
"""
|
||||
|
||||
def __init__(self, charset=None, env=None, echo_stdin=False,
|
||||
mix_stderr=True):
|
||||
if charset is None:
|
||||
charset = 'utf-8'
|
||||
self.charset = charset
|
||||
self.env = env or {}
|
||||
self.echo_stdin = echo_stdin
|
||||
self.mix_stderr = mix_stderr
|
||||
|
||||
def get_default_prog_name(self, cli):
|
||||
"""Given a command object it will return the default program name
|
||||
for it. The default is the `name` attribute or ``"root"`` if not
|
||||
set.
|
||||
"""
|
||||
return cli.name or 'root'
|
||||
|
||||
def make_env(self, overrides=None):
|
||||
"""Returns the environment overrides for invoking a script."""
|
||||
rv = dict(self.env)
|
||||
if overrides:
|
||||
rv.update(overrides)
|
||||
return rv
|
||||
|
||||
@contextlib.contextmanager
|
||||
def isolation(self, input=None, env=None, color=False):
|
||||
"""A context manager that sets up the isolation for invoking of a
|
||||
command line tool. This sets up stdin with the given input data
|
||||
and `os.environ` with the overrides from the given dictionary.
|
||||
This also rebinds some internals in Click to be mocked (like the
|
||||
prompt functionality).
|
||||
|
||||
This is automatically done in the :meth:`invoke` method.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
The ``color`` parameter was added.
|
||||
|
||||
:param input: the input stream to put into sys.stdin.
|
||||
:param env: the environment overrides as dictionary.
|
||||
:param color: whether the output should contain color codes. The
|
||||
application can still override this explicitly.
|
||||
"""
|
||||
input = make_input_stream(input, self.charset)
|
||||
|
||||
old_stdin = sys.stdin
|
||||
old_stdout = sys.stdout
|
||||
old_stderr = sys.stderr
|
||||
old_forced_width = clickpkg.formatting.FORCED_WIDTH
|
||||
clickpkg.formatting.FORCED_WIDTH = 80
|
||||
|
||||
env = self.make_env(env)
|
||||
|
||||
if PY2:
|
||||
bytes_output = StringIO()
|
||||
if self.echo_stdin:
|
||||
input = EchoingStdin(input, bytes_output)
|
||||
sys.stdout = bytes_output
|
||||
if not self.mix_stderr:
|
||||
bytes_error = StringIO()
|
||||
sys.stderr = bytes_error
|
||||
else:
|
||||
bytes_output = io.BytesIO()
|
||||
if self.echo_stdin:
|
||||
input = EchoingStdin(input, bytes_output)
|
||||
input = io.TextIOWrapper(input, encoding=self.charset)
|
||||
sys.stdout = io.TextIOWrapper(
|
||||
bytes_output, encoding=self.charset)
|
||||
if not self.mix_stderr:
|
||||
bytes_error = io.BytesIO()
|
||||
sys.stderr = io.TextIOWrapper(
|
||||
bytes_error, encoding=self.charset)
|
||||
|
||||
if self.mix_stderr:
|
||||
sys.stderr = sys.stdout
|
||||
|
||||
sys.stdin = input
|
||||
|
||||
def visible_input(prompt=None):
|
||||
sys.stdout.write(prompt or '')
|
||||
val = input.readline().rstrip('\r\n')
|
||||
sys.stdout.write(val + '\n')
|
||||
sys.stdout.flush()
|
||||
return val
|
||||
|
||||
def hidden_input(prompt=None):
|
||||
sys.stdout.write((prompt or '') + '\n')
|
||||
sys.stdout.flush()
|
||||
return input.readline().rstrip('\r\n')
|
||||
|
||||
def _getchar(echo):
|
||||
char = sys.stdin.read(1)
|
||||
if echo:
|
||||
sys.stdout.write(char)
|
||||
sys.stdout.flush()
|
||||
return char
|
||||
|
||||
default_color = color
|
||||
|
||||
def should_strip_ansi(stream=None, color=None):
|
||||
if color is None:
|
||||
return not default_color
|
||||
return not color
|
||||
|
||||
old_visible_prompt_func = clickpkg.termui.visible_prompt_func
|
||||
old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func
|
||||
old__getchar_func = clickpkg.termui._getchar
|
||||
old_should_strip_ansi = clickpkg.utils.should_strip_ansi
|
||||
clickpkg.termui.visible_prompt_func = visible_input
|
||||
clickpkg.termui.hidden_prompt_func = hidden_input
|
||||
clickpkg.termui._getchar = _getchar
|
||||
clickpkg.utils.should_strip_ansi = should_strip_ansi
|
||||
|
||||
old_env = {}
|
||||
try:
|
||||
for key, value in iteritems(env):
|
||||
old_env[key] = os.environ.get(key)
|
||||
if value is None:
|
||||
try:
|
||||
del os.environ[key]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
os.environ[key] = value
|
||||
yield (bytes_output, not self.mix_stderr and bytes_error)
|
||||
finally:
|
||||
for key, value in iteritems(old_env):
|
||||
if value is None:
|
||||
try:
|
||||
del os.environ[key]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
os.environ[key] = value
|
||||
sys.stdout = old_stdout
|
||||
sys.stderr = old_stderr
|
||||
sys.stdin = old_stdin
|
||||
clickpkg.termui.visible_prompt_func = old_visible_prompt_func
|
||||
clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func
|
||||
clickpkg.termui._getchar = old__getchar_func
|
||||
clickpkg.utils.should_strip_ansi = old_should_strip_ansi
|
||||
clickpkg.formatting.FORCED_WIDTH = old_forced_width
|
||||
|
||||
def invoke(self, cli, args=None, input=None, env=None,
|
||||
catch_exceptions=True, color=False, mix_stderr=False, **extra):
|
||||
"""Invokes a command in an isolated environment. The arguments are
|
||||
forwarded directly to the command line script, the `extra` keyword
|
||||
arguments are passed to the :meth:`~clickpkg.Command.main` function of
|
||||
the command.
|
||||
|
||||
This returns a :class:`Result` object.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
The ``catch_exceptions`` parameter was added.
|
||||
|
||||
.. versionchanged:: 3.0
|
||||
The result object now has an `exc_info` attribute with the
|
||||
traceback if available.
|
||||
|
||||
.. versionadded:: 4.0
|
||||
The ``color`` parameter was added.
|
||||
|
||||
:param cli: the command to invoke
|
||||
:param args: the arguments to invoke. It may be given as an iterable
|
||||
or a string. When given as string it will be interpreted
|
||||
as a Unix shell command. More details at
|
||||
:func:`shlex.split`.
|
||||
:param input: the input data for `sys.stdin`.
|
||||
:param env: the environment overrides.
|
||||
:param catch_exceptions: Whether to catch any other exceptions than
|
||||
``SystemExit``.
|
||||
:param extra: the keyword arguments to pass to :meth:`main`.
|
||||
:param color: whether the output should contain color codes. The
|
||||
application can still override this explicitly.
|
||||
"""
|
||||
exc_info = None
|
||||
with self.isolation(input=input, env=env, color=color) as outstreams:
|
||||
exception = None
|
||||
exit_code = 0
|
||||
|
||||
if isinstance(args, string_types):
|
||||
args = shlex.split(args)
|
||||
|
||||
try:
|
||||
prog_name = extra.pop("prog_name")
|
||||
except KeyError:
|
||||
prog_name = self.get_default_prog_name(cli)
|
||||
|
||||
try:
|
||||
cli.main(args=args or (), prog_name=prog_name, **extra)
|
||||
except SystemExit as e:
|
||||
exc_info = sys.exc_info()
|
||||
exit_code = e.code
|
||||
if exit_code is None:
|
||||
exit_code = 0
|
||||
|
||||
if exit_code != 0:
|
||||
exception = e
|
||||
|
||||
if not isinstance(exit_code, int):
|
||||
sys.stdout.write(str(exit_code))
|
||||
sys.stdout.write('\n')
|
||||
exit_code = 1
|
||||
|
||||
except Exception as e:
|
||||
if not catch_exceptions:
|
||||
raise
|
||||
exception = e
|
||||
exit_code = 1
|
||||
exc_info = sys.exc_info()
|
||||
finally:
|
||||
sys.stdout.flush()
|
||||
stdout = outstreams[0].getvalue()
|
||||
stderr = outstreams[1] and outstreams[1].getvalue()
|
||||
|
||||
return Result(runner=self,
|
||||
stdout_bytes=stdout,
|
||||
stderr_bytes=stderr,
|
||||
exit_code=exit_code,
|
||||
exception=exception,
|
||||
exc_info=exc_info)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def isolated_filesystem(self):
|
||||
"""A context manager that creates a temporary folder and changes
|
||||
the current working directory to it for isolated filesystem tests.
|
||||
"""
|
||||
cwd = os.getcwd()
|
||||
t = tempfile.mkdtemp()
|
||||
os.chdir(t)
|
||||
try:
|
||||
yield t
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
try:
|
||||
shutil.rmtree(t)
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
@@ -1,668 +0,0 @@
|
||||
import os
|
||||
import stat
|
||||
from datetime import datetime
|
||||
|
||||
from ._compat import open_stream, text_type, filename_to_ui, \
|
||||
get_filesystem_encoding, get_streerror, _get_argv_encoding, PY2
|
||||
from .exceptions import BadParameter
|
||||
from .utils import safecall, LazyFile
|
||||
|
||||
|
||||
class ParamType(object):
|
||||
"""Helper for converting values through types. The following is
|
||||
necessary for a valid type:
|
||||
|
||||
* it needs a name
|
||||
* it needs to pass through None unchanged
|
||||
* it needs to convert from a string
|
||||
* it needs to convert its result type through unchanged
|
||||
(eg: needs to be idempotent)
|
||||
* it needs to be able to deal with param and context being `None`.
|
||||
This can be the case when the object is used with prompt
|
||||
inputs.
|
||||
"""
|
||||
is_composite = False
|
||||
|
||||
#: the descriptive name of this type
|
||||
name = None
|
||||
|
||||
#: if a list of this type is expected and the value is pulled from a
|
||||
#: string environment variable, this is what splits it up. `None`
|
||||
#: means any whitespace. For all parameters the general rule is that
|
||||
#: whitespace splits them up. The exception are paths and files which
|
||||
#: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
|
||||
#: Windows).
|
||||
envvar_list_splitter = None
|
||||
|
||||
def __call__(self, value, param=None, ctx=None):
|
||||
if value is not None:
|
||||
return self.convert(value, param, ctx)
|
||||
|
||||
def get_metavar(self, param):
|
||||
"""Returns the metavar default for this param if it provides one."""
|
||||
|
||||
def get_missing_message(self, param):
|
||||
"""Optionally might return extra information about a missing
|
||||
parameter.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
"""Converts the value. This is not invoked for values that are
|
||||
`None` (the missing value).
|
||||
"""
|
||||
return value
|
||||
|
||||
def split_envvar_value(self, rv):
|
||||
"""Given a value from an environment variable this splits it up
|
||||
into small chunks depending on the defined envvar list splitter.
|
||||
|
||||
If the splitter is set to `None`, which means that whitespace splits,
|
||||
then leading and trailing whitespace is ignored. Otherwise, leading
|
||||
and trailing splitters usually lead to empty items being included.
|
||||
"""
|
||||
return (rv or '').split(self.envvar_list_splitter)
|
||||
|
||||
def fail(self, message, param=None, ctx=None):
|
||||
"""Helper method to fail with an invalid value message."""
|
||||
raise BadParameter(message, ctx=ctx, param=param)
|
||||
|
||||
|
||||
class CompositeParamType(ParamType):
|
||||
is_composite = True
|
||||
|
||||
@property
|
||||
def arity(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
class FuncParamType(ParamType):
|
||||
|
||||
def __init__(self, func):
|
||||
self.name = func.__name__
|
||||
self.func = func
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
try:
|
||||
return self.func(value)
|
||||
except ValueError:
|
||||
try:
|
||||
value = text_type(value)
|
||||
except UnicodeError:
|
||||
value = str(value).decode('utf-8', 'replace')
|
||||
self.fail(value, param, ctx)
|
||||
|
||||
|
||||
class UnprocessedParamType(ParamType):
|
||||
name = 'text'
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
return value
|
||||
|
||||
def __repr__(self):
|
||||
return 'UNPROCESSED'
|
||||
|
||||
|
||||
class StringParamType(ParamType):
|
||||
name = 'text'
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
if isinstance(value, bytes):
|
||||
enc = _get_argv_encoding()
|
||||
try:
|
||||
value = value.decode(enc)
|
||||
except UnicodeError:
|
||||
fs_enc = get_filesystem_encoding()
|
||||
if fs_enc != enc:
|
||||
try:
|
||||
value = value.decode(fs_enc)
|
||||
except UnicodeError:
|
||||
value = value.decode('utf-8', 'replace')
|
||||
return value
|
||||
return value
|
||||
|
||||
def __repr__(self):
|
||||
return 'STRING'
|
||||
|
||||
|
||||
class Choice(ParamType):
|
||||
"""The choice type allows a value to be checked against a fixed set
|
||||
of supported values. All of these values have to be strings.
|
||||
|
||||
You should only pass a list or tuple of choices. Other iterables
|
||||
(like generators) may lead to surprising results.
|
||||
|
||||
See :ref:`choice-opts` for an example.
|
||||
|
||||
:param case_sensitive: Set to false to make choices case
|
||||
insensitive. Defaults to true.
|
||||
"""
|
||||
|
||||
name = 'choice'
|
||||
|
||||
def __init__(self, choices, case_sensitive=True):
|
||||
self.choices = choices
|
||||
self.case_sensitive = case_sensitive
|
||||
|
||||
def get_metavar(self, param):
|
||||
return '[%s]' % '|'.join(self.choices)
|
||||
|
||||
def get_missing_message(self, param):
|
||||
return 'Choose from:\n\t%s.' % ',\n\t'.join(self.choices)
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
# Exact match
|
||||
if value in self.choices:
|
||||
return value
|
||||
|
||||
# Match through normalization and case sensitivity
|
||||
# first do token_normalize_func, then lowercase
|
||||
# preserve original `value` to produce an accurate message in
|
||||
# `self.fail`
|
||||
normed_value = value
|
||||
normed_choices = self.choices
|
||||
|
||||
if ctx is not None and \
|
||||
ctx.token_normalize_func is not None:
|
||||
normed_value = ctx.token_normalize_func(value)
|
||||
normed_choices = [ctx.token_normalize_func(choice) for choice in
|
||||
self.choices]
|
||||
|
||||
if not self.case_sensitive:
|
||||
normed_value = normed_value.lower()
|
||||
normed_choices = [choice.lower() for choice in normed_choices]
|
||||
|
||||
if normed_value in normed_choices:
|
||||
return normed_value
|
||||
|
||||
self.fail('invalid choice: %s. (choose from %s)' %
|
||||
(value, ', '.join(self.choices)), param, ctx)
|
||||
|
||||
def __repr__(self):
|
||||
return 'Choice(%r)' % list(self.choices)
|
||||
|
||||
|
||||
class DateTime(ParamType):
|
||||
"""The DateTime type converts date strings into `datetime` objects.
|
||||
|
||||
The format strings which are checked are configurable, but default to some
|
||||
common (non-timezone aware) ISO 8601 formats.
|
||||
|
||||
When specifying *DateTime* formats, you should only pass a list or a tuple.
|
||||
Other iterables, like generators, may lead to surprising results.
|
||||
|
||||
The format strings are processed using ``datetime.strptime``, and this
|
||||
consequently defines the format strings which are allowed.
|
||||
|
||||
Parsing is tried using each format, in order, and the first format which
|
||||
parses successfully is used.
|
||||
|
||||
:param formats: A list or tuple of date format strings, in the order in
|
||||
which they should be tried. Defaults to
|
||||
``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
|
||||
``'%Y-%m-%d %H:%M:%S'``.
|
||||
"""
|
||||
name = 'datetime'
|
||||
|
||||
def __init__(self, formats=None):
|
||||
self.formats = formats or [
|
||||
'%Y-%m-%d',
|
||||
'%Y-%m-%dT%H:%M:%S',
|
||||
'%Y-%m-%d %H:%M:%S'
|
||||
]
|
||||
|
||||
def get_metavar(self, param):
|
||||
return '[{}]'.format('|'.join(self.formats))
|
||||
|
||||
def _try_to_convert_date(self, value, format):
|
||||
try:
|
||||
return datetime.strptime(value, format)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
# Exact match
|
||||
for format in self.formats:
|
||||
dtime = self._try_to_convert_date(value, format)
|
||||
if dtime:
|
||||
return dtime
|
||||
|
||||
self.fail(
|
||||
'invalid datetime format: {}. (choose from {})'.format(
|
||||
value, ', '.join(self.formats)))
|
||||
|
||||
def __repr__(self):
|
||||
return 'DateTime'
|
||||
|
||||
|
||||
class IntParamType(ParamType):
|
||||
name = 'integer'
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
try:
|
||||
return int(value)
|
||||
except (ValueError, UnicodeError):
|
||||
self.fail('%s is not a valid integer' % value, param, ctx)
|
||||
|
||||
def __repr__(self):
|
||||
return 'INT'
|
||||
|
||||
|
||||
class IntRange(IntParamType):
|
||||
"""A parameter that works similar to :data:`click.INT` but restricts
|
||||
the value to fit into a range. The default behavior is to fail if the
|
||||
value falls outside the range, but it can also be silently clamped
|
||||
between the two edges.
|
||||
|
||||
See :ref:`ranges` for an example.
|
||||
"""
|
||||
name = 'integer range'
|
||||
|
||||
def __init__(self, min=None, max=None, clamp=False):
|
||||
self.min = min
|
||||
self.max = max
|
||||
self.clamp = clamp
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
rv = IntParamType.convert(self, value, param, ctx)
|
||||
if self.clamp:
|
||||
if self.min is not None and rv < self.min:
|
||||
return self.min
|
||||
if self.max is not None and rv > self.max:
|
||||
return self.max
|
||||
if self.min is not None and rv < self.min or \
|
||||
self.max is not None and rv > self.max:
|
||||
if self.min is None:
|
||||
self.fail('%s is bigger than the maximum valid value '
|
||||
'%s.' % (rv, self.max), param, ctx)
|
||||
elif self.max is None:
|
||||
self.fail('%s is smaller than the minimum valid value '
|
||||
'%s.' % (rv, self.min), param, ctx)
|
||||
else:
|
||||
self.fail('%s is not in the valid range of %s to %s.'
|
||||
% (rv, self.min, self.max), param, ctx)
|
||||
return rv
|
||||
|
||||
def __repr__(self):
|
||||
return 'IntRange(%r, %r)' % (self.min, self.max)
|
||||
|
||||
|
||||
class FloatParamType(ParamType):
|
||||
name = 'float'
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
try:
|
||||
return float(value)
|
||||
except (UnicodeError, ValueError):
|
||||
self.fail('%s is not a valid floating point value' %
|
||||
value, param, ctx)
|
||||
|
||||
def __repr__(self):
|
||||
return 'FLOAT'
|
||||
|
||||
|
||||
class FloatRange(FloatParamType):
|
||||
"""A parameter that works similar to :data:`click.FLOAT` but restricts
|
||||
the value to fit into a range. The default behavior is to fail if the
|
||||
value falls outside the range, but it can also be silently clamped
|
||||
between the two edges.
|
||||
|
||||
See :ref:`ranges` for an example.
|
||||
"""
|
||||
name = 'float range'
|
||||
|
||||
def __init__(self, min=None, max=None, clamp=False):
|
||||
self.min = min
|
||||
self.max = max
|
||||
self.clamp = clamp
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
rv = FloatParamType.convert(self, value, param, ctx)
|
||||
if self.clamp:
|
||||
if self.min is not None and rv < self.min:
|
||||
return self.min
|
||||
if self.max is not None and rv > self.max:
|
||||
return self.max
|
||||
if self.min is not None and rv < self.min or \
|
||||
self.max is not None and rv > self.max:
|
||||
if self.min is None:
|
||||
self.fail('%s is bigger than the maximum valid value '
|
||||
'%s.' % (rv, self.max), param, ctx)
|
||||
elif self.max is None:
|
||||
self.fail('%s is smaller than the minimum valid value '
|
||||
'%s.' % (rv, self.min), param, ctx)
|
||||
else:
|
||||
self.fail('%s is not in the valid range of %s to %s.'
|
||||
% (rv, self.min, self.max), param, ctx)
|
||||
return rv
|
||||
|
||||
def __repr__(self):
|
||||
return 'FloatRange(%r, %r)' % (self.min, self.max)
|
||||
|
||||
|
||||
class BoolParamType(ParamType):
|
||||
name = 'boolean'
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
if isinstance(value, bool):
|
||||
return bool(value)
|
||||
value = value.lower()
|
||||
if value in ('true', 't', '1', 'yes', 'y'):
|
||||
return True
|
||||
elif value in ('false', 'f', '0', 'no', 'n'):
|
||||
return False
|
||||
self.fail('%s is not a valid boolean' % value, param, ctx)
|
||||
|
||||
def __repr__(self):
|
||||
return 'BOOL'
|
||||
|
||||
|
||||
class UUIDParameterType(ParamType):
|
||||
name = 'uuid'
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
import uuid
|
||||
try:
|
||||
if PY2 and isinstance(value, text_type):
|
||||
value = value.encode('ascii')
|
||||
return uuid.UUID(value)
|
||||
except (UnicodeError, ValueError):
|
||||
self.fail('%s is not a valid UUID value' % value, param, ctx)
|
||||
|
||||
def __repr__(self):
|
||||
return 'UUID'
|
||||
|
||||
|
||||
class File(ParamType):
|
||||
"""Declares a parameter to be a file for reading or writing. The file
|
||||
is automatically closed once the context tears down (after the command
|
||||
finished working).
|
||||
|
||||
Files can be opened for reading or writing. The special value ``-``
|
||||
indicates stdin or stdout depending on the mode.
|
||||
|
||||
By default, the file is opened for reading text data, but it can also be
|
||||
opened in binary mode or for writing. The encoding parameter can be used
|
||||
to force a specific encoding.
|
||||
|
||||
The `lazy` flag controls if the file should be opened immediately or upon
|
||||
first IO. The default is to be non-lazy for standard input and output
|
||||
streams as well as files opened for reading, `lazy` otherwise. When opening a
|
||||
file lazily for reading, it is still opened temporarily for validation, but
|
||||
will not be held open until first IO. lazy is mainly useful when opening
|
||||
for writing to avoid creating the file until it is needed.
|
||||
|
||||
Starting with Click 2.0, files can also be opened atomically in which
|
||||
case all writes go into a separate file in the same folder and upon
|
||||
completion the file will be moved over to the original location. This
|
||||
is useful if a file regularly read by other users is modified.
|
||||
|
||||
See :ref:`file-args` for more information.
|
||||
"""
|
||||
name = 'filename'
|
||||
envvar_list_splitter = os.path.pathsep
|
||||
|
||||
def __init__(self, mode='r', encoding=None, errors='strict', lazy=None,
|
||||
atomic=False):
|
||||
self.mode = mode
|
||||
self.encoding = encoding
|
||||
self.errors = errors
|
||||
self.lazy = lazy
|
||||
self.atomic = atomic
|
||||
|
||||
def resolve_lazy_flag(self, value):
|
||||
if self.lazy is not None:
|
||||
return self.lazy
|
||||
if value == '-':
|
||||
return False
|
||||
elif 'w' in self.mode:
|
||||
return True
|
||||
return False
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
try:
|
||||
if hasattr(value, 'read') or hasattr(value, 'write'):
|
||||
return value
|
||||
|
||||
lazy = self.resolve_lazy_flag(value)
|
||||
|
||||
if lazy:
|
||||
f = LazyFile(value, self.mode, self.encoding, self.errors,
|
||||
atomic=self.atomic)
|
||||
if ctx is not None:
|
||||
ctx.call_on_close(f.close_intelligently)
|
||||
return f
|
||||
|
||||
f, should_close = open_stream(value, self.mode,
|
||||
self.encoding, self.errors,
|
||||
atomic=self.atomic)
|
||||
# If a context is provided, we automatically close the file
|
||||
# at the end of the context execution (or flush out). If a
|
||||
# context does not exist, it's the caller's responsibility to
|
||||
# properly close the file. This for instance happens when the
|
||||
# type is used with prompts.
|
||||
if ctx is not None:
|
||||
if should_close:
|
||||
ctx.call_on_close(safecall(f.close))
|
||||
else:
|
||||
ctx.call_on_close(safecall(f.flush))
|
||||
return f
|
||||
except (IOError, OSError) as e:
|
||||
self.fail('Could not open file: %s: %s' % (
|
||||
filename_to_ui(value),
|
||||
get_streerror(e),
|
||||
), param, ctx)
|
||||
|
||||
|
||||
class Path(ParamType):
|
||||
"""The path type is similar to the :class:`File` type but it performs
|
||||
different checks. First of all, instead of returning an open file
|
||||
handle it returns just the filename. Secondly, it can perform various
|
||||
basic checks about what the file or directory should be.
|
||||
|
||||
.. versionchanged:: 6.0
|
||||
`allow_dash` was added.
|
||||
|
||||
:param exists: if set to true, the file or directory needs to exist for
|
||||
this value to be valid. If this is not required and a
|
||||
file does indeed not exist, then all further checks are
|
||||
silently skipped.
|
||||
:param file_okay: controls if a file is a possible value.
|
||||
:param dir_okay: controls if a directory is a possible value.
|
||||
:param writable: if true, a writable check is performed.
|
||||
:param readable: if true, a readable check is performed.
|
||||
:param resolve_path: if this is true, then the path is fully resolved
|
||||
before the value is passed onwards. This means
|
||||
that it's absolute and symlinks are resolved. It
|
||||
will not expand a tilde-prefix, as this is
|
||||
supposed to be done by the shell only.
|
||||
:param allow_dash: If this is set to `True`, a single dash to indicate
|
||||
standard streams is permitted.
|
||||
:param path_type: optionally a string type that should be used to
|
||||
represent the path. The default is `None` which
|
||||
means the return value will be either bytes or
|
||||
unicode depending on what makes most sense given the
|
||||
input data Click deals with.
|
||||
"""
|
||||
envvar_list_splitter = os.path.pathsep
|
||||
|
||||
def __init__(self, exists=False, file_okay=True, dir_okay=True,
|
||||
writable=False, readable=True, resolve_path=False,
|
||||
allow_dash=False, path_type=None):
|
||||
self.exists = exists
|
||||
self.file_okay = file_okay
|
||||
self.dir_okay = dir_okay
|
||||
self.writable = writable
|
||||
self.readable = readable
|
||||
self.resolve_path = resolve_path
|
||||
self.allow_dash = allow_dash
|
||||
self.type = path_type
|
||||
|
||||
if self.file_okay and not self.dir_okay:
|
||||
self.name = 'file'
|
||||
self.path_type = 'File'
|
||||
elif self.dir_okay and not self.file_okay:
|
||||
self.name = 'directory'
|
||||
self.path_type = 'Directory'
|
||||
else:
|
||||
self.name = 'path'
|
||||
self.path_type = 'Path'
|
||||
|
||||
def coerce_path_result(self, rv):
|
||||
if self.type is not None and not isinstance(rv, self.type):
|
||||
if self.type is text_type:
|
||||
rv = rv.decode(get_filesystem_encoding())
|
||||
else:
|
||||
rv = rv.encode(get_filesystem_encoding())
|
||||
return rv
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
rv = value
|
||||
|
||||
is_dash = self.file_okay and self.allow_dash and rv in (b'-', '-')
|
||||
|
||||
if not is_dash:
|
||||
if self.resolve_path:
|
||||
rv = os.path.realpath(rv)
|
||||
|
||||
try:
|
||||
st = os.stat(rv)
|
||||
except OSError:
|
||||
if not self.exists:
|
||||
return self.coerce_path_result(rv)
|
||||
self.fail('%s "%s" does not exist.' % (
|
||||
self.path_type,
|
||||
filename_to_ui(value)
|
||||
), param, ctx)
|
||||
|
||||
if not self.file_okay and stat.S_ISREG(st.st_mode):
|
||||
self.fail('%s "%s" is a file.' % (
|
||||
self.path_type,
|
||||
filename_to_ui(value)
|
||||
), param, ctx)
|
||||
if not self.dir_okay and stat.S_ISDIR(st.st_mode):
|
||||
self.fail('%s "%s" is a directory.' % (
|
||||
self.path_type,
|
||||
filename_to_ui(value)
|
||||
), param, ctx)
|
||||
if self.writable and not os.access(value, os.W_OK):
|
||||
self.fail('%s "%s" is not writable.' % (
|
||||
self.path_type,
|
||||
filename_to_ui(value)
|
||||
), param, ctx)
|
||||
if self.readable and not os.access(value, os.R_OK):
|
||||
self.fail('%s "%s" is not readable.' % (
|
||||
self.path_type,
|
||||
filename_to_ui(value)
|
||||
), param, ctx)
|
||||
|
||||
return self.coerce_path_result(rv)
|
||||
|
||||
|
||||
class Tuple(CompositeParamType):
|
||||
"""The default behavior of Click is to apply a type on a value directly.
|
||||
This works well in most cases, except for when `nargs` is set to a fixed
|
||||
count and different types should be used for different items. In this
|
||||
case the :class:`Tuple` type can be used. This type can only be used
|
||||
if `nargs` is set to a fixed number.
|
||||
|
||||
For more information see :ref:`tuple-type`.
|
||||
|
||||
This can be selected by using a Python tuple literal as a type.
|
||||
|
||||
:param types: a list of types that should be used for the tuple items.
|
||||
"""
|
||||
|
||||
def __init__(self, types):
|
||||
self.types = [convert_type(ty) for ty in types]
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return "<" + " ".join(ty.name for ty in self.types) + ">"
|
||||
|
||||
@property
|
||||
def arity(self):
|
||||
return len(self.types)
|
||||
|
||||
def convert(self, value, param, ctx):
|
||||
if len(value) != len(self.types):
|
||||
raise TypeError('It would appear that nargs is set to conflict '
|
||||
'with the composite type arity.')
|
||||
return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value))
|
||||
|
||||
|
||||
def convert_type(ty, default=None):
|
||||
"""Converts a callable or python ty into the most appropriate param
|
||||
ty.
|
||||
"""
|
||||
guessed_type = False
|
||||
if ty is None and default is not None:
|
||||
if isinstance(default, tuple):
|
||||
ty = tuple(map(type, default))
|
||||
else:
|
||||
ty = type(default)
|
||||
guessed_type = True
|
||||
|
||||
if isinstance(ty, tuple):
|
||||
return Tuple(ty)
|
||||
if isinstance(ty, ParamType):
|
||||
return ty
|
||||
if ty is text_type or ty is str or ty is None:
|
||||
return STRING
|
||||
if ty is int:
|
||||
return INT
|
||||
# Booleans are only okay if not guessed. This is done because for
|
||||
# flags the default value is actually a bit of a lie in that it
|
||||
# indicates which of the flags is the one we want. See get_default()
|
||||
# for more information.
|
||||
if ty is bool and not guessed_type:
|
||||
return BOOL
|
||||
if ty is float:
|
||||
return FLOAT
|
||||
if guessed_type:
|
||||
return STRING
|
||||
|
||||
# Catch a common mistake
|
||||
if __debug__:
|
||||
try:
|
||||
if issubclass(ty, ParamType):
|
||||
raise AssertionError('Attempted to use an uninstantiated '
|
||||
'parameter type (%s).' % ty)
|
||||
except TypeError:
|
||||
pass
|
||||
return FuncParamType(ty)
|
||||
|
||||
|
||||
#: A dummy parameter type that just does nothing. From a user's
|
||||
#: perspective this appears to just be the same as `STRING` but internally
|
||||
#: no string conversion takes place. This is necessary to achieve the
|
||||
#: same bytes/unicode behavior on Python 2/3 in situations where you want
|
||||
#: to not convert argument types. This is usually useful when working
|
||||
#: with file paths as they can appear in bytes and unicode.
|
||||
#:
|
||||
#: For path related uses the :class:`Path` type is a better choice but
|
||||
#: there are situations where an unprocessed type is useful which is why
|
||||
#: it is is provided.
|
||||
#:
|
||||
#: .. versionadded:: 4.0
|
||||
UNPROCESSED = UnprocessedParamType()
|
||||
|
||||
#: A unicode string parameter type which is the implicit default. This
|
||||
#: can also be selected by using ``str`` as type.
|
||||
STRING = StringParamType()
|
||||
|
||||
#: An integer parameter. This can also be selected by using ``int`` as
|
||||
#: type.
|
||||
INT = IntParamType()
|
||||
|
||||
#: A floating point value parameter. This can also be selected by using
|
||||
#: ``float`` as type.
|
||||
FLOAT = FloatParamType()
|
||||
|
||||
#: A boolean parameter. This is the default for boolean flags. This can
|
||||
#: also be selected by using ``bool`` as a type.
|
||||
BOOL = BoolParamType()
|
||||
|
||||
#: A UUID parameter.
|
||||
UUID = UUIDParameterType()
|
||||
@@ -1,440 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from .globals import resolve_color_default
|
||||
|
||||
from ._compat import text_type, open_stream, get_filesystem_encoding, \
|
||||
get_streerror, string_types, PY2, binary_streams, text_streams, \
|
||||
filename_to_ui, auto_wrap_for_ansi, strip_ansi, should_strip_ansi, \
|
||||
_default_text_stdout, _default_text_stderr, is_bytes, WIN
|
||||
|
||||
if not PY2:
|
||||
from ._compat import _find_binary_writer
|
||||
elif WIN:
|
||||
from ._winconsole import _get_windows_argv, \
|
||||
_hash_py_argv, _initial_argv_hash
|
||||
|
||||
|
||||
echo_native_types = string_types + (bytes, bytearray)
|
||||
|
||||
|
||||
def _posixify(name):
|
||||
return '-'.join(name.split()).lower()
|
||||
|
||||
|
||||
def safecall(func):
|
||||
"""Wraps a function so that it swallows exceptions."""
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception:
|
||||
pass
|
||||
return wrapper
|
||||
|
||||
|
||||
def make_str(value):
|
||||
"""Converts a value into a valid string."""
|
||||
if isinstance(value, bytes):
|
||||
try:
|
||||
return value.decode(get_filesystem_encoding())
|
||||
except UnicodeError:
|
||||
return value.decode('utf-8', 'replace')
|
||||
return text_type(value)
|
||||
|
||||
|
||||
def make_default_short_help(help, max_length=45):
|
||||
"""Return a condensed version of help string."""
|
||||
words = help.split()
|
||||
total_length = 0
|
||||
result = []
|
||||
done = False
|
||||
|
||||
for word in words:
|
||||
if word[-1:] == '.':
|
||||
done = True
|
||||
new_length = result and 1 + len(word) or len(word)
|
||||
if total_length + new_length > max_length:
|
||||
result.append('...')
|
||||
done = True
|
||||
else:
|
||||
if result:
|
||||
result.append(' ')
|
||||
result.append(word)
|
||||
if done:
|
||||
break
|
||||
total_length += new_length
|
||||
|
||||
return ''.join(result)
|
||||
|
||||
|
||||
class LazyFile(object):
|
||||
"""A lazy file works like a regular file but it does not fully open
|
||||
the file but it does perform some basic checks early to see if the
|
||||
filename parameter does make sense. This is useful for safely opening
|
||||
files for writing.
|
||||
"""
|
||||
|
||||
def __init__(self, filename, mode='r', encoding=None, errors='strict',
|
||||
atomic=False):
|
||||
self.name = filename
|
||||
self.mode = mode
|
||||
self.encoding = encoding
|
||||
self.errors = errors
|
||||
self.atomic = atomic
|
||||
|
||||
if filename == '-':
|
||||
self._f, self.should_close = open_stream(filename, mode,
|
||||
encoding, errors)
|
||||
else:
|
||||
if 'r' in mode:
|
||||
# Open and close the file in case we're opening it for
|
||||
# reading so that we can catch at least some errors in
|
||||
# some cases early.
|
||||
open(filename, mode).close()
|
||||
self._f = None
|
||||
self.should_close = True
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.open(), name)
|
||||
|
||||
def __repr__(self):
|
||||
if self._f is not None:
|
||||
return repr(self._f)
|
||||
return '<unopened file %r %s>' % (self.name, self.mode)
|
||||
|
||||
def open(self):
|
||||
"""Opens the file if it's not yet open. This call might fail with
|
||||
a :exc:`FileError`. Not handling this error will produce an error
|
||||
that Click shows.
|
||||
"""
|
||||
if self._f is not None:
|
||||
return self._f
|
||||
try:
|
||||
rv, self.should_close = open_stream(self.name, self.mode,
|
||||
self.encoding,
|
||||
self.errors,
|
||||
atomic=self.atomic)
|
||||
except (IOError, OSError) as e:
|
||||
from .exceptions import FileError
|
||||
raise FileError(self.name, hint=get_streerror(e))
|
||||
self._f = rv
|
||||
return rv
|
||||
|
||||
def close(self):
|
||||
"""Closes the underlying file, no matter what."""
|
||||
if self._f is not None:
|
||||
self._f.close()
|
||||
|
||||
def close_intelligently(self):
|
||||
"""This function only closes the file if it was opened by the lazy
|
||||
file wrapper. For instance this will never close stdin.
|
||||
"""
|
||||
if self.should_close:
|
||||
self.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
self.close_intelligently()
|
||||
|
||||
def __iter__(self):
|
||||
self.open()
|
||||
return iter(self._f)
|
||||
|
||||
|
||||
class KeepOpenFile(object):
|
||||
|
||||
def __init__(self, file):
|
||||
self._file = file
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self._file, name)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
pass
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self._file)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._file)
|
||||
|
||||
|
||||
def echo(message=None, file=None, nl=True, err=False, color=None):
|
||||
"""Prints a message plus a newline to the given file or stdout. On
|
||||
first sight, this looks like the print function, but it has improved
|
||||
support for handling Unicode and binary data that does not fail no
|
||||
matter how badly configured the system is.
|
||||
|
||||
Primarily it means that you can print binary data as well as Unicode
|
||||
data on both 2.x and 3.x to the given file in the most appropriate way
|
||||
possible. This is a very carefree function in that it will try its
|
||||
best to not fail. As of Click 6.0 this includes support for unicode
|
||||
output on the Windows console.
|
||||
|
||||
In addition to that, if `colorama`_ is installed, the echo function will
|
||||
also support clever handling of ANSI codes. Essentially it will then
|
||||
do the following:
|
||||
|
||||
- add transparent handling of ANSI color codes on Windows.
|
||||
- hide ANSI codes automatically if the destination file is not a
|
||||
terminal.
|
||||
|
||||
.. _colorama: https://pypi.org/project/colorama/
|
||||
|
||||
.. versionchanged:: 6.0
|
||||
As of Click 6.0 the echo function will properly support unicode
|
||||
output on the windows console. Not that click does not modify
|
||||
the interpreter in any way which means that `sys.stdout` or the
|
||||
print statement or function will still not provide unicode support.
|
||||
|
||||
.. versionchanged:: 2.0
|
||||
Starting with version 2.0 of Click, the echo function will work
|
||||
with colorama if it's installed.
|
||||
|
||||
.. versionadded:: 3.0
|
||||
The `err` parameter was added.
|
||||
|
||||
.. versionchanged:: 4.0
|
||||
Added the `color` flag.
|
||||
|
||||
:param message: the message to print
|
||||
:param file: the file to write to (defaults to ``stdout``)
|
||||
:param err: if set to true the file defaults to ``stderr`` instead of
|
||||
``stdout``. This is faster and easier than calling
|
||||
:func:`get_text_stderr` yourself.
|
||||
:param nl: if set to `True` (the default) a newline is printed afterwards.
|
||||
:param color: controls if the terminal supports ANSI colors or not. The
|
||||
default is autodetection.
|
||||
"""
|
||||
if file is None:
|
||||
if err:
|
||||
file = _default_text_stderr()
|
||||
else:
|
||||
file = _default_text_stdout()
|
||||
|
||||
# Convert non bytes/text into the native string type.
|
||||
if message is not None and not isinstance(message, echo_native_types):
|
||||
message = text_type(message)
|
||||
|
||||
if nl:
|
||||
message = message or u''
|
||||
if isinstance(message, text_type):
|
||||
message += u'\n'
|
||||
else:
|
||||
message += b'\n'
|
||||
|
||||
# If there is a message, and we're in Python 3, and the value looks
|
||||
# like bytes, we manually need to find the binary stream and write the
|
||||
# message in there. This is done separately so that most stream
|
||||
# types will work as you would expect. Eg: you can write to StringIO
|
||||
# for other cases.
|
||||
if message and not PY2 and is_bytes(message):
|
||||
binary_file = _find_binary_writer(file)
|
||||
if binary_file is not None:
|
||||
file.flush()
|
||||
binary_file.write(message)
|
||||
binary_file.flush()
|
||||
return
|
||||
|
||||
# ANSI-style support. If there is no message or we are dealing with
|
||||
# bytes nothing is happening. If we are connected to a file we want
|
||||
# to strip colors. If we are on windows we either wrap the stream
|
||||
# to strip the color or we use the colorama support to translate the
|
||||
# ansi codes to API calls.
|
||||
if message and not is_bytes(message):
|
||||
color = resolve_color_default(color)
|
||||
if should_strip_ansi(file, color):
|
||||
message = strip_ansi(message)
|
||||
elif WIN:
|
||||
if auto_wrap_for_ansi is not None:
|
||||
file = auto_wrap_for_ansi(file)
|
||||
elif not color:
|
||||
message = strip_ansi(message)
|
||||
|
||||
if message:
|
||||
file.write(message)
|
||||
file.flush()
|
||||
|
||||
|
||||
def get_binary_stream(name):
|
||||
"""Returns a system stream for byte processing. This essentially
|
||||
returns the stream from the sys module with the given name but it
|
||||
solves some compatibility issues between different Python versions.
|
||||
Primarily this function is necessary for getting binary streams on
|
||||
Python 3.
|
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``,
|
||||
``'stdout'`` and ``'stderr'``
|
||||
"""
|
||||
opener = binary_streams.get(name)
|
||||
if opener is None:
|
||||
raise TypeError('Unknown standard stream %r' % name)
|
||||
return opener()
|
||||
|
||||
|
||||
def get_text_stream(name, encoding=None, errors='strict'):
|
||||
"""Returns a system stream for text processing. This usually returns
|
||||
a wrapped stream around a binary stream returned from
|
||||
:func:`get_binary_stream` but it also can take shortcuts on Python 3
|
||||
for already correctly configured streams.
|
||||
|
||||
:param name: the name of the stream to open. Valid names are ``'stdin'``,
|
||||
``'stdout'`` and ``'stderr'``
|
||||
:param encoding: overrides the detected default encoding.
|
||||
:param errors: overrides the default error mode.
|
||||
"""
|
||||
opener = text_streams.get(name)
|
||||
if opener is None:
|
||||
raise TypeError('Unknown standard stream %r' % name)
|
||||
return opener(encoding, errors)
|
||||
|
||||
|
||||
def open_file(filename, mode='r', encoding=None, errors='strict',
|
||||
lazy=False, atomic=False):
|
||||
"""This is similar to how the :class:`File` works but for manual
|
||||
usage. Files are opened non lazy by default. This can open regular
|
||||
files as well as stdin/stdout if ``'-'`` is passed.
|
||||
|
||||
If stdin/stdout is returned the stream is wrapped so that the context
|
||||
manager will not close the stream accidentally. This makes it possible
|
||||
to always use the function like this without having to worry to
|
||||
accidentally close a standard stream::
|
||||
|
||||
with open_file(filename) as f:
|
||||
...
|
||||
|
||||
.. versionadded:: 3.0
|
||||
|
||||
:param filename: the name of the file to open (or ``'-'`` for stdin/stdout).
|
||||
:param mode: the mode in which to open the file.
|
||||
:param encoding: the encoding to use.
|
||||
:param errors: the error handling for this file.
|
||||
:param lazy: can be flipped to true to open the file lazily.
|
||||
:param atomic: in atomic mode writes go into a temporary file and it's
|
||||
moved on close.
|
||||
"""
|
||||
if lazy:
|
||||
return LazyFile(filename, mode, encoding, errors, atomic=atomic)
|
||||
f, should_close = open_stream(filename, mode, encoding, errors,
|
||||
atomic=atomic)
|
||||
if not should_close:
|
||||
f = KeepOpenFile(f)
|
||||
return f
|
||||
|
||||
|
||||
def get_os_args():
|
||||
"""This returns the argument part of sys.argv in the most appropriate
|
||||
form for processing. What this means is that this return value is in
|
||||
a format that works for Click to process but does not necessarily
|
||||
correspond well to what's actually standard for the interpreter.
|
||||
|
||||
On most environments the return value is ``sys.argv[:1]`` unchanged.
|
||||
However if you are on Windows and running Python 2 the return value
|
||||
will actually be a list of unicode strings instead because the
|
||||
default behavior on that platform otherwise will not be able to
|
||||
carry all possible values that sys.argv can have.
|
||||
|
||||
.. versionadded:: 6.0
|
||||
"""
|
||||
# We can only extract the unicode argv if sys.argv has not been
|
||||
# changed since the startup of the application.
|
||||
if PY2 and WIN and _initial_argv_hash == _hash_py_argv():
|
||||
return _get_windows_argv()
|
||||
return sys.argv[1:]
|
||||
|
||||
|
||||
def format_filename(filename, shorten=False):
|
||||
"""Formats a filename for user display. The main purpose of this
|
||||
function is to ensure that the filename can be displayed at all. This
|
||||
will decode the filename to unicode if necessary in a way that it will
|
||||
not fail. Optionally, it can shorten the filename to not include the
|
||||
full path to the filename.
|
||||
|
||||
:param filename: formats a filename for UI display. This will also convert
|
||||
the filename into unicode without failing.
|
||||
:param shorten: this optionally shortens the filename to strip of the
|
||||
path that leads up to it.
|
||||
"""
|
||||
if shorten:
|
||||
filename = os.path.basename(filename)
|
||||
return filename_to_ui(filename)
|
||||
|
||||
|
||||
def get_app_dir(app_name, roaming=True, force_posix=False):
|
||||
r"""Returns the config folder for the application. The default behavior
|
||||
is to return whatever is most appropriate for the operating system.
|
||||
|
||||
To give you an idea, for an app called ``"Foo Bar"``, something like
|
||||
the following folders could be returned:
|
||||
|
||||
Mac OS X:
|
||||
``~/Library/Application Support/Foo Bar``
|
||||
Mac OS X (POSIX):
|
||||
``~/.foo-bar``
|
||||
Unix:
|
||||
``~/.config/foo-bar``
|
||||
Unix (POSIX):
|
||||
``~/.foo-bar``
|
||||
Win XP (roaming):
|
||||
``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
|
||||
Win XP (not roaming):
|
||||
``C:\Documents and Settings\<user>\Application Data\Foo Bar``
|
||||
Win 7 (roaming):
|
||||
``C:\Users\<user>\AppData\Roaming\Foo Bar``
|
||||
Win 7 (not roaming):
|
||||
``C:\Users\<user>\AppData\Local\Foo Bar``
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
:param app_name: the application name. This should be properly capitalized
|
||||
and can contain whitespace.
|
||||
:param roaming: controls if the folder should be roaming or not on Windows.
|
||||
Has no affect otherwise.
|
||||
:param force_posix: if this is set to `True` then on any POSIX system the
|
||||
folder will be stored in the home folder with a leading
|
||||
dot instead of the XDG config home or darwin's
|
||||
application support folder.
|
||||
"""
|
||||
if WIN:
|
||||
key = roaming and 'APPDATA' or 'LOCALAPPDATA'
|
||||
folder = os.environ.get(key)
|
||||
if folder is None:
|
||||
folder = os.path.expanduser('~')
|
||||
return os.path.join(folder, app_name)
|
||||
if force_posix:
|
||||
return os.path.join(os.path.expanduser('~/.' + _posixify(app_name)))
|
||||
if sys.platform == 'darwin':
|
||||
return os.path.join(os.path.expanduser(
|
||||
'~/Library/Application Support'), app_name)
|
||||
return os.path.join(
|
||||
os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')),
|
||||
_posixify(app_name))
|
||||
|
||||
|
||||
class PacifyFlushWrapper(object):
|
||||
"""This wrapper is used to catch and suppress BrokenPipeErrors resulting
|
||||
from ``.flush()`` being called on broken pipe during the shutdown/final-GC
|
||||
of the Python interpreter. Notably ``.flush()`` is always called on
|
||||
``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
|
||||
other cleanup code, and the case where the underlying file is not a broken
|
||||
pipe, all calls and attributes are proxied.
|
||||
"""
|
||||
|
||||
def __init__(self, wrapped):
|
||||
self.wrapped = wrapped
|
||||
|
||||
def flush(self):
|
||||
try:
|
||||
self.wrapped.flush()
|
||||
except IOError as e:
|
||||
import errno
|
||||
if e.errno != errno.EPIPE:
|
||||
raise
|
||||
|
||||
def __getattr__(self, attr):
|
||||
return getattr(self.wrapped, attr)
|
||||
@@ -1,2 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from ._version import VERSION as __version__
|
||||
@@ -1,34 +0,0 @@
|
||||
"""
|
||||
Common code used in multiple modules.
|
||||
"""
|
||||
|
||||
|
||||
class weekday(object):
|
||||
__slots__ = ["weekday", "n"]
|
||||
|
||||
def __init__(self, weekday, n=None):
|
||||
self.weekday = weekday
|
||||
self.n = n
|
||||
|
||||
def __call__(self, n):
|
||||
if n == self.n:
|
||||
return self
|
||||
else:
|
||||
return self.__class__(self.weekday, n)
|
||||
|
||||
def __eq__(self, other):
|
||||
try:
|
||||
if self.weekday != other.weekday or self.n != other.n:
|
||||
return False
|
||||
except AttributeError:
|
||||
return False
|
||||
return True
|
||||
|
||||
__hash__ = None
|
||||
|
||||
def __repr__(self):
|
||||
s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
|
||||
if not self.n:
|
||||
return s
|
||||
else:
|
||||
return "%s(%+d)" % (s, self.n)
|
||||
@@ -1,10 +0,0 @@
|
||||
"""
|
||||
Contains information about the dateutil version.
|
||||
"""
|
||||
|
||||
VERSION_MAJOR = 2
|
||||
VERSION_MINOR = 6
|
||||
VERSION_PATCH = 1
|
||||
|
||||
VERSION_TUPLE = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
|
||||
VERSION = '.'.join(map(str, VERSION_TUPLE))
|
||||
@@ -1,89 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
This module offers a generic easter computing method for any given year, using
|
||||
Western, Orthodox or Julian algorithms.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
|
||||
__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]
|
||||
|
||||
EASTER_JULIAN = 1
|
||||
EASTER_ORTHODOX = 2
|
||||
EASTER_WESTERN = 3
|
||||
|
||||
|
||||
def easter(year, method=EASTER_WESTERN):
|
||||
"""
|
||||
This method was ported from the work done by GM Arts,
|
||||
on top of the algorithm by Claus Tondering, which was
|
||||
based in part on the algorithm of Ouding (1940), as
|
||||
quoted in "Explanatory Supplement to the Astronomical
|
||||
Almanac", P. Kenneth Seidelmann, editor.
|
||||
|
||||
This algorithm implements three different easter
|
||||
calculation methods:
|
||||
|
||||
1 - Original calculation in Julian calendar, valid in
|
||||
dates after 326 AD
|
||||
2 - Original method, with date converted to Gregorian
|
||||
calendar, valid in years 1583 to 4099
|
||||
3 - Revised method, in Gregorian calendar, valid in
|
||||
years 1583 to 4099 as well
|
||||
|
||||
These methods are represented by the constants:
|
||||
|
||||
* ``EASTER_JULIAN = 1``
|
||||
* ``EASTER_ORTHODOX = 2``
|
||||
* ``EASTER_WESTERN = 3``
|
||||
|
||||
The default method is method 3.
|
||||
|
||||
More about the algorithm may be found at:
|
||||
|
||||
http://users.chariot.net.au/~gmarts/eastalg.htm
|
||||
|
||||
and
|
||||
|
||||
http://www.tondering.dk/claus/calendar.html
|
||||
|
||||
"""
|
||||
|
||||
if not (1 <= method <= 3):
|
||||
raise ValueError("invalid method")
|
||||
|
||||
# g - Golden year - 1
|
||||
# c - Century
|
||||
# h - (23 - Epact) mod 30
|
||||
# i - Number of days from March 21 to Paschal Full Moon
|
||||
# j - Weekday for PFM (0=Sunday, etc)
|
||||
# p - Number of days from March 21 to Sunday on or before PFM
|
||||
# (-6 to 28 methods 1 & 3, to 56 for method 2)
|
||||
# e - Extra days to add for method 2 (converting Julian
|
||||
# date to Gregorian date)
|
||||
|
||||
y = year
|
||||
g = y % 19
|
||||
e = 0
|
||||
if method < 3:
|
||||
# Old method
|
||||
i = (19*g + 15) % 30
|
||||
j = (y + y//4 + i) % 7
|
||||
if method == 2:
|
||||
# Extra dates to convert Julian to Gregorian date
|
||||
e = 10
|
||||
if y > 1600:
|
||||
e = e + y//100 - 16 - (y//100 - 16)//4
|
||||
else:
|
||||
# New method
|
||||
c = y//100
|
||||
h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30
|
||||
i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11))
|
||||
j = (y + y//4 + i + 2 - c + c//4) % 7
|
||||
|
||||
# p can be from -6 to 56 corresponding to dates 22 March to 23 May
|
||||
# (later dates apply to method 2, although 23 May never actually occurs)
|
||||
p = i - j + e
|
||||
d = 1 + (p + 27 + (p + 6)//40) % 31
|
||||
m = 3 + (p + 26)//30
|
||||
return datetime.date(int(y), int(m), int(d))
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,549 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import datetime
|
||||
import calendar
|
||||
|
||||
import operator
|
||||
from math import copysign
|
||||
|
||||
from six import integer_types
|
||||
from warnings import warn
|
||||
|
||||
from ._common import weekday
|
||||
|
||||
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7))
|
||||
|
||||
__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"]
|
||||
|
||||
|
||||
class relativedelta(object):
|
||||
"""
|
||||
The relativedelta type is based on the specification of the excellent
|
||||
work done by M.-A. Lemburg in his
|
||||
`mx.DateTime <http://www.egenix.com/files/python/mxDateTime.html>`_ extension.
|
||||
However, notice that this type does *NOT* implement the same algorithm as
|
||||
his work. Do *NOT* expect it to behave like mx.DateTime's counterpart.
|
||||
|
||||
There are two different ways to build a relativedelta instance. The
|
||||
first one is passing it two date/datetime classes::
|
||||
|
||||
relativedelta(datetime1, datetime2)
|
||||
|
||||
The second one is passing it any number of the following keyword arguments::
|
||||
|
||||
relativedelta(arg1=x,arg2=y,arg3=z...)
|
||||
|
||||
year, month, day, hour, minute, second, microsecond:
|
||||
Absolute information (argument is singular); adding or subtracting a
|
||||
relativedelta with absolute information does not perform an aritmetic
|
||||
operation, but rather REPLACES the corresponding value in the
|
||||
original datetime with the value(s) in relativedelta.
|
||||
|
||||
years, months, weeks, days, hours, minutes, seconds, microseconds:
|
||||
Relative information, may be negative (argument is plural); adding
|
||||
or subtracting a relativedelta with relative information performs
|
||||
the corresponding aritmetic operation on the original datetime value
|
||||
with the information in the relativedelta.
|
||||
|
||||
weekday:
|
||||
One of the weekday instances (MO, TU, etc). These instances may
|
||||
receive a parameter N, specifying the Nth weekday, which could
|
||||
be positive or negative (like MO(+1) or MO(-2). Not specifying
|
||||
it is the same as specifying +1. You can also use an integer,
|
||||
where 0=MO.
|
||||
|
||||
leapdays:
|
||||
Will add given days to the date found, if year is a leap
|
||||
year, and the date found is post 28 of february.
|
||||
|
||||
yearday, nlyearday:
|
||||
Set the yearday or the non-leap year day (jump leap days).
|
||||
These are converted to day/month/leapdays information.
|
||||
|
||||
Here is the behavior of operations with relativedelta:
|
||||
|
||||
1. Calculate the absolute year, using the 'year' argument, or the
|
||||
original datetime year, if the argument is not present.
|
||||
|
||||
2. Add the relative 'years' argument to the absolute year.
|
||||
|
||||
3. Do steps 1 and 2 for month/months.
|
||||
|
||||
4. Calculate the absolute day, using the 'day' argument, or the
|
||||
original datetime day, if the argument is not present. Then,
|
||||
subtract from the day until it fits in the year and month
|
||||
found after their operations.
|
||||
|
||||
5. Add the relative 'days' argument to the absolute day. Notice
|
||||
that the 'weeks' argument is multiplied by 7 and added to
|
||||
'days'.
|
||||
|
||||
6. Do steps 1 and 2 for hour/hours, minute/minutes, second/seconds,
|
||||
microsecond/microseconds.
|
||||
|
||||
7. If the 'weekday' argument is present, calculate the weekday,
|
||||
with the given (wday, nth) tuple. wday is the index of the
|
||||
weekday (0-6, 0=Mon), and nth is the number of weeks to add
|
||||
forward or backward, depending on its signal. Notice that if
|
||||
the calculated date is already Monday, for example, using
|
||||
(0, 1) or (0, -1) won't change the day.
|
||||
"""
|
||||
|
||||
def __init__(self, dt1=None, dt2=None,
|
||||
years=0, months=0, days=0, leapdays=0, weeks=0,
|
||||
hours=0, minutes=0, seconds=0, microseconds=0,
|
||||
year=None, month=None, day=None, weekday=None,
|
||||
yearday=None, nlyearday=None,
|
||||
hour=None, minute=None, second=None, microsecond=None):
|
||||
|
||||
# Check for non-integer values in integer-only quantities
|
||||
if any(x is not None and x != int(x) for x in (years, months)):
|
||||
raise ValueError("Non-integer years and months are "
|
||||
"ambiguous and not currently supported.")
|
||||
|
||||
if dt1 and dt2:
|
||||
# datetime is a subclass of date. So both must be date
|
||||
if not (isinstance(dt1, datetime.date) and
|
||||
isinstance(dt2, datetime.date)):
|
||||
raise TypeError("relativedelta only diffs datetime/date")
|
||||
|
||||
# We allow two dates, or two datetimes, so we coerce them to be
|
||||
# of the same type
|
||||
if (isinstance(dt1, datetime.datetime) !=
|
||||
isinstance(dt2, datetime.datetime)):
|
||||
if not isinstance(dt1, datetime.datetime):
|
||||
dt1 = datetime.datetime.fromordinal(dt1.toordinal())
|
||||
elif not isinstance(dt2, datetime.datetime):
|
||||
dt2 = datetime.datetime.fromordinal(dt2.toordinal())
|
||||
|
||||
self.years = 0
|
||||
self.months = 0
|
||||
self.days = 0
|
||||
self.leapdays = 0
|
||||
self.hours = 0
|
||||
self.minutes = 0
|
||||
self.seconds = 0
|
||||
self.microseconds = 0
|
||||
self.year = None
|
||||
self.month = None
|
||||
self.day = None
|
||||
self.weekday = None
|
||||
self.hour = None
|
||||
self.minute = None
|
||||
self.second = None
|
||||
self.microsecond = None
|
||||
self._has_time = 0
|
||||
|
||||
# Get year / month delta between the two
|
||||
months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month)
|
||||
self._set_months(months)
|
||||
|
||||
# Remove the year/month delta so the timedelta is just well-defined
|
||||
# time units (seconds, days and microseconds)
|
||||
dtm = self.__radd__(dt2)
|
||||
|
||||
# If we've overshot our target, make an adjustment
|
||||
if dt1 < dt2:
|
||||
compare = operator.gt
|
||||
increment = 1
|
||||
else:
|
||||
compare = operator.lt
|
||||
increment = -1
|
||||
|
||||
while compare(dt1, dtm):
|
||||
months += increment
|
||||
self._set_months(months)
|
||||
dtm = self.__radd__(dt2)
|
||||
|
||||
# Get the timedelta between the "months-adjusted" date and dt1
|
||||
delta = dt1 - dtm
|
||||
self.seconds = delta.seconds + delta.days * 86400
|
||||
self.microseconds = delta.microseconds
|
||||
else:
|
||||
# Relative information
|
||||
self.years = years
|
||||
self.months = months
|
||||
self.days = days + weeks * 7
|
||||
self.leapdays = leapdays
|
||||
self.hours = hours
|
||||
self.minutes = minutes
|
||||
self.seconds = seconds
|
||||
self.microseconds = microseconds
|
||||
|
||||
# Absolute information
|
||||
self.year = year
|
||||
self.month = month
|
||||
self.day = day
|
||||
self.hour = hour
|
||||
self.minute = minute
|
||||
self.second = second
|
||||
self.microsecond = microsecond
|
||||
|
||||
if any(x is not None and int(x) != x
|
||||
for x in (year, month, day, hour,
|
||||
minute, second, microsecond)):
|
||||
# For now we'll deprecate floats - later it'll be an error.
|
||||
warn("Non-integer value passed as absolute information. " +
|
||||
"This is not a well-defined condition and will raise " +
|
||||
"errors in future versions.", DeprecationWarning)
|
||||
|
||||
if isinstance(weekday, integer_types):
|
||||
self.weekday = weekdays[weekday]
|
||||
else:
|
||||
self.weekday = weekday
|
||||
|
||||
yday = 0
|
||||
if nlyearday:
|
||||
yday = nlyearday
|
||||
elif yearday:
|
||||
yday = yearday
|
||||
if yearday > 59:
|
||||
self.leapdays = -1
|
||||
if yday:
|
||||
ydayidx = [31, 59, 90, 120, 151, 181, 212,
|
||||
243, 273, 304, 334, 366]
|
||||
for idx, ydays in enumerate(ydayidx):
|
||||
if yday <= ydays:
|
||||
self.month = idx+1
|
||||
if idx == 0:
|
||||
self.day = yday
|
||||
else:
|
||||
self.day = yday-ydayidx[idx-1]
|
||||
break
|
||||
else:
|
||||
raise ValueError("invalid year day (%d)" % yday)
|
||||
|
||||
self._fix()
|
||||
|
||||
def _fix(self):
|
||||
if abs(self.microseconds) > 999999:
|
||||
s = _sign(self.microseconds)
|
||||
div, mod = divmod(self.microseconds * s, 1000000)
|
||||
self.microseconds = mod * s
|
||||
self.seconds += div * s
|
||||
if abs(self.seconds) > 59:
|
||||
s = _sign(self.seconds)
|
||||
div, mod = divmod(self.seconds * s, 60)
|
||||
self.seconds = mod * s
|
||||
self.minutes += div * s
|
||||
if abs(self.minutes) > 59:
|
||||
s = _sign(self.minutes)
|
||||
div, mod = divmod(self.minutes * s, 60)
|
||||
self.minutes = mod * s
|
||||
self.hours += div * s
|
||||
if abs(self.hours) > 23:
|
||||
s = _sign(self.hours)
|
||||
div, mod = divmod(self.hours * s, 24)
|
||||
self.hours = mod * s
|
||||
self.days += div * s
|
||||
if abs(self.months) > 11:
|
||||
s = _sign(self.months)
|
||||
div, mod = divmod(self.months * s, 12)
|
||||
self.months = mod * s
|
||||
self.years += div * s
|
||||
if (self.hours or self.minutes or self.seconds or self.microseconds
|
||||
or self.hour is not None or self.minute is not None or
|
||||
self.second is not None or self.microsecond is not None):
|
||||
self._has_time = 1
|
||||
else:
|
||||
self._has_time = 0
|
||||
|
||||
@property
|
||||
def weeks(self):
|
||||
return self.days // 7
|
||||
|
||||
@weeks.setter
|
||||
def weeks(self, value):
|
||||
self.days = self.days - (self.weeks * 7) + value * 7
|
||||
|
||||
def _set_months(self, months):
|
||||
self.months = months
|
||||
if abs(self.months) > 11:
|
||||
s = _sign(self.months)
|
||||
div, mod = divmod(self.months * s, 12)
|
||||
self.months = mod * s
|
||||
self.years = div * s
|
||||
else:
|
||||
self.years = 0
|
||||
|
||||
def normalized(self):
|
||||
"""
|
||||
Return a version of this object represented entirely using integer
|
||||
values for the relative attributes.
|
||||
|
||||
>>> relativedelta(days=1.5, hours=2).normalized()
|
||||
relativedelta(days=1, hours=14)
|
||||
|
||||
:return:
|
||||
Returns a :class:`dateutil.relativedelta.relativedelta` object.
|
||||
"""
|
||||
# Cascade remainders down (rounding each to roughly nearest microsecond)
|
||||
days = int(self.days)
|
||||
|
||||
hours_f = round(self.hours + 24 * (self.days - days), 11)
|
||||
hours = int(hours_f)
|
||||
|
||||
minutes_f = round(self.minutes + 60 * (hours_f - hours), 10)
|
||||
minutes = int(minutes_f)
|
||||
|
||||
seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8)
|
||||
seconds = int(seconds_f)
|
||||
|
||||
microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds))
|
||||
|
||||
# Constructor carries overflow back up with call to _fix()
|
||||
return self.__class__(years=self.years, months=self.months,
|
||||
days=days, hours=hours, minutes=minutes,
|
||||
seconds=seconds, microseconds=microseconds,
|
||||
leapdays=self.leapdays, year=self.year,
|
||||
month=self.month, day=self.day,
|
||||
weekday=self.weekday, hour=self.hour,
|
||||
minute=self.minute, second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
def __add__(self, other):
|
||||
if isinstance(other, relativedelta):
|
||||
return self.__class__(years=other.years + self.years,
|
||||
months=other.months + self.months,
|
||||
days=other.days + self.days,
|
||||
hours=other.hours + self.hours,
|
||||
minutes=other.minutes + self.minutes,
|
||||
seconds=other.seconds + self.seconds,
|
||||
microseconds=(other.microseconds +
|
||||
self.microseconds),
|
||||
leapdays=other.leapdays or self.leapdays,
|
||||
year=(other.year if other.year is not None
|
||||
else self.year),
|
||||
month=(other.month if other.month is not None
|
||||
else self.month),
|
||||
day=(other.day if other.day is not None
|
||||
else self.day),
|
||||
weekday=(other.weekday if other.weekday is not None
|
||||
else self.weekday),
|
||||
hour=(other.hour if other.hour is not None
|
||||
else self.hour),
|
||||
minute=(other.minute if other.minute is not None
|
||||
else self.minute),
|
||||
second=(other.second if other.second is not None
|
||||
else self.second),
|
||||
microsecond=(other.microsecond if other.microsecond
|
||||
is not None else
|
||||
self.microsecond))
|
||||
if isinstance(other, datetime.timedelta):
|
||||
return self.__class__(years=self.years,
|
||||
months=self.months,
|
||||
days=self.days + other.days,
|
||||
hours=self.hours,
|
||||
minutes=self.minutes,
|
||||
seconds=self.seconds + other.seconds,
|
||||
microseconds=self.microseconds + other.microseconds,
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
if not isinstance(other, datetime.date):
|
||||
return NotImplemented
|
||||
elif self._has_time and not isinstance(other, datetime.datetime):
|
||||
other = datetime.datetime.fromordinal(other.toordinal())
|
||||
year = (self.year or other.year)+self.years
|
||||
month = self.month or other.month
|
||||
if self.months:
|
||||
assert 1 <= abs(self.months) <= 12
|
||||
month += self.months
|
||||
if month > 12:
|
||||
year += 1
|
||||
month -= 12
|
||||
elif month < 1:
|
||||
year -= 1
|
||||
month += 12
|
||||
day = min(calendar.monthrange(year, month)[1],
|
||||
self.day or other.day)
|
||||
repl = {"year": year, "month": month, "day": day}
|
||||
for attr in ["hour", "minute", "second", "microsecond"]:
|
||||
value = getattr(self, attr)
|
||||
if value is not None:
|
||||
repl[attr] = value
|
||||
days = self.days
|
||||
if self.leapdays and month > 2 and calendar.isleap(year):
|
||||
days += self.leapdays
|
||||
ret = (other.replace(**repl)
|
||||
+ datetime.timedelta(days=days,
|
||||
hours=self.hours,
|
||||
minutes=self.minutes,
|
||||
seconds=self.seconds,
|
||||
microseconds=self.microseconds))
|
||||
if self.weekday:
|
||||
weekday, nth = self.weekday.weekday, self.weekday.n or 1
|
||||
jumpdays = (abs(nth) - 1) * 7
|
||||
if nth > 0:
|
||||
jumpdays += (7 - ret.weekday() + weekday) % 7
|
||||
else:
|
||||
jumpdays += (ret.weekday() - weekday) % 7
|
||||
jumpdays *= -1
|
||||
ret += datetime.timedelta(days=jumpdays)
|
||||
return ret
|
||||
|
||||
def __radd__(self, other):
|
||||
return self.__add__(other)
|
||||
|
||||
def __rsub__(self, other):
|
||||
return self.__neg__().__radd__(other)
|
||||
|
||||
def __sub__(self, other):
|
||||
if not isinstance(other, relativedelta):
|
||||
return NotImplemented # In case the other object defines __rsub__
|
||||
return self.__class__(years=self.years - other.years,
|
||||
months=self.months - other.months,
|
||||
days=self.days - other.days,
|
||||
hours=self.hours - other.hours,
|
||||
minutes=self.minutes - other.minutes,
|
||||
seconds=self.seconds - other.seconds,
|
||||
microseconds=self.microseconds - other.microseconds,
|
||||
leapdays=self.leapdays or other.leapdays,
|
||||
year=(self.year if self.year is not None
|
||||
else other.year),
|
||||
month=(self.month if self.month is not None else
|
||||
other.month),
|
||||
day=(self.day if self.day is not None else
|
||||
other.day),
|
||||
weekday=(self.weekday if self.weekday is not None else
|
||||
other.weekday),
|
||||
hour=(self.hour if self.hour is not None else
|
||||
other.hour),
|
||||
minute=(self.minute if self.minute is not None else
|
||||
other.minute),
|
||||
second=(self.second if self.second is not None else
|
||||
other.second),
|
||||
microsecond=(self.microsecond if self.microsecond
|
||||
is not None else
|
||||
other.microsecond))
|
||||
|
||||
def __neg__(self):
|
||||
return self.__class__(years=-self.years,
|
||||
months=-self.months,
|
||||
days=-self.days,
|
||||
hours=-self.hours,
|
||||
minutes=-self.minutes,
|
||||
seconds=-self.seconds,
|
||||
microseconds=-self.microseconds,
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
def __bool__(self):
|
||||
return not (not self.years and
|
||||
not self.months and
|
||||
not self.days and
|
||||
not self.hours and
|
||||
not self.minutes and
|
||||
not self.seconds and
|
||||
not self.microseconds and
|
||||
not self.leapdays and
|
||||
self.year is None and
|
||||
self.month is None and
|
||||
self.day is None and
|
||||
self.weekday is None and
|
||||
self.hour is None and
|
||||
self.minute is None and
|
||||
self.second is None and
|
||||
self.microsecond is None)
|
||||
# Compatibility with Python 2.x
|
||||
__nonzero__ = __bool__
|
||||
|
||||
def __mul__(self, other):
|
||||
try:
|
||||
f = float(other)
|
||||
except TypeError:
|
||||
return NotImplemented
|
||||
|
||||
return self.__class__(years=int(self.years * f),
|
||||
months=int(self.months * f),
|
||||
days=int(self.days * f),
|
||||
hours=int(self.hours * f),
|
||||
minutes=int(self.minutes * f),
|
||||
seconds=int(self.seconds * f),
|
||||
microseconds=int(self.microseconds * f),
|
||||
leapdays=self.leapdays,
|
||||
year=self.year,
|
||||
month=self.month,
|
||||
day=self.day,
|
||||
weekday=self.weekday,
|
||||
hour=self.hour,
|
||||
minute=self.minute,
|
||||
second=self.second,
|
||||
microsecond=self.microsecond)
|
||||
|
||||
__rmul__ = __mul__
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, relativedelta):
|
||||
return NotImplemented
|
||||
if self.weekday or other.weekday:
|
||||
if not self.weekday or not other.weekday:
|
||||
return False
|
||||
if self.weekday.weekday != other.weekday.weekday:
|
||||
return False
|
||||
n1, n2 = self.weekday.n, other.weekday.n
|
||||
if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)):
|
||||
return False
|
||||
return (self.years == other.years and
|
||||
self.months == other.months and
|
||||
self.days == other.days and
|
||||
self.hours == other.hours and
|
||||
self.minutes == other.minutes and
|
||||
self.seconds == other.seconds and
|
||||
self.microseconds == other.microseconds and
|
||||
self.leapdays == other.leapdays and
|
||||
self.year == other.year and
|
||||
self.month == other.month and
|
||||
self.day == other.day and
|
||||
self.hour == other.hour and
|
||||
self.minute == other.minute and
|
||||
self.second == other.second and
|
||||
self.microsecond == other.microsecond)
|
||||
|
||||
__hash__ = None
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __div__(self, other):
|
||||
try:
|
||||
reciprocal = 1 / float(other)
|
||||
except TypeError:
|
||||
return NotImplemented
|
||||
|
||||
return self.__mul__(reciprocal)
|
||||
|
||||
__truediv__ = __div__
|
||||
|
||||
def __repr__(self):
|
||||
l = []
|
||||
for attr in ["years", "months", "days", "leapdays",
|
||||
"hours", "minutes", "seconds", "microseconds"]:
|
||||
value = getattr(self, attr)
|
||||
if value:
|
||||
l.append("{attr}={value:+g}".format(attr=attr, value=value))
|
||||
for attr in ["year", "month", "day", "weekday",
|
||||
"hour", "minute", "second", "microsecond"]:
|
||||
value = getattr(self, attr)
|
||||
if value is not None:
|
||||
l.append("{attr}={value}".format(attr=attr, value=repr(value)))
|
||||
return "{classname}({attrs})".format(classname=self.__class__.__name__,
|
||||
attrs=", ".join(l))
|
||||
|
||||
|
||||
def _sign(x):
|
||||
return int(copysign(1, x))
|
||||
|
||||
# vim:ts=4:sw=4:et
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,5 +0,0 @@
|
||||
from .tz import *
|
||||
|
||||
__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
|
||||
"tzstr", "tzical", "tzwin", "tzwinlocal", "gettz",
|
||||
"enfold", "datetime_ambiguous", "datetime_exists"]
|
||||
@@ -1,394 +0,0 @@
|
||||
from six import PY3
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from datetime import datetime, timedelta, tzinfo
|
||||
|
||||
|
||||
ZERO = timedelta(0)
|
||||
|
||||
__all__ = ['tzname_in_python2', 'enfold']
|
||||
|
||||
|
||||
def tzname_in_python2(namefunc):
|
||||
"""Change unicode output into bytestrings in Python 2
|
||||
|
||||
tzname() API changed in Python 3. It used to return bytes, but was changed
|
||||
to unicode strings
|
||||
"""
|
||||
def adjust_encoding(*args, **kwargs):
|
||||
name = namefunc(*args, **kwargs)
|
||||
if name is not None and not PY3:
|
||||
name = name.encode()
|
||||
|
||||
return name
|
||||
|
||||
return adjust_encoding
|
||||
|
||||
|
||||
# The following is adapted from Alexander Belopolsky's tz library
|
||||
# https://github.com/abalkin/tz
|
||||
if hasattr(datetime, 'fold'):
|
||||
# This is the pre-python 3.6 fold situation
|
||||
def enfold(dt, fold=1):
|
||||
"""
|
||||
Provides a unified interface for assigning the ``fold`` attribute to
|
||||
datetimes both before and after the implementation of PEP-495.
|
||||
|
||||
:param fold:
|
||||
The value for the ``fold`` attribute in the returned datetime. This
|
||||
should be either 0 or 1.
|
||||
|
||||
:return:
|
||||
Returns an object for which ``getattr(dt, 'fold', 0)`` returns
|
||||
``fold`` for all versions of Python. In versions prior to
|
||||
Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
|
||||
subclass of :py:class:`datetime.datetime` with the ``fold``
|
||||
attribute added, if ``fold`` is 1.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
return dt.replace(fold=fold)
|
||||
|
||||
else:
|
||||
class _DatetimeWithFold(datetime):
|
||||
"""
|
||||
This is a class designed to provide a PEP 495-compliant interface for
|
||||
Python versions before 3.6. It is used only for dates in a fold, so
|
||||
the ``fold`` attribute is fixed at ``1``.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
__slots__ = ()
|
||||
|
||||
@property
|
||||
def fold(self):
|
||||
return 1
|
||||
|
||||
def enfold(dt, fold=1):
|
||||
"""
|
||||
Provides a unified interface for assigning the ``fold`` attribute to
|
||||
datetimes both before and after the implementation of PEP-495.
|
||||
|
||||
:param fold:
|
||||
The value for the ``fold`` attribute in the returned datetime. This
|
||||
should be either 0 or 1.
|
||||
|
||||
:return:
|
||||
Returns an object for which ``getattr(dt, 'fold', 0)`` returns
|
||||
``fold`` for all versions of Python. In versions prior to
|
||||
Python 3.6, this is a ``_DatetimeWithFold`` object, which is a
|
||||
subclass of :py:class:`datetime.datetime` with the ``fold``
|
||||
attribute added, if ``fold`` is 1.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
if getattr(dt, 'fold', 0) == fold:
|
||||
return dt
|
||||
|
||||
args = dt.timetuple()[:6]
|
||||
args += (dt.microsecond, dt.tzinfo)
|
||||
|
||||
if fold:
|
||||
return _DatetimeWithFold(*args)
|
||||
else:
|
||||
return datetime(*args)
|
||||
|
||||
|
||||
def _validate_fromutc_inputs(f):
|
||||
"""
|
||||
The CPython version of ``fromutc`` checks that the input is a ``datetime``
|
||||
object and that ``self`` is attached as its ``tzinfo``.
|
||||
"""
|
||||
@wraps(f)
|
||||
def fromutc(self, dt):
|
||||
if not isinstance(dt, datetime):
|
||||
raise TypeError("fromutc() requires a datetime argument")
|
||||
if dt.tzinfo is not self:
|
||||
raise ValueError("dt.tzinfo is not self")
|
||||
|
||||
return f(self, dt)
|
||||
|
||||
return fromutc
|
||||
|
||||
|
||||
class _tzinfo(tzinfo):
|
||||
"""
|
||||
Base class for all ``dateutil`` ``tzinfo`` objects.
|
||||
"""
|
||||
|
||||
def is_ambiguous(self, dt):
|
||||
"""
|
||||
Whether or not the "wall time" of a given datetime is ambiguous in this
|
||||
zone.
|
||||
|
||||
:param dt:
|
||||
A :py:class:`datetime.datetime`, naive or time zone aware.
|
||||
|
||||
|
||||
:return:
|
||||
Returns ``True`` if ambiguous, ``False`` otherwise.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
|
||||
dt = dt.replace(tzinfo=self)
|
||||
|
||||
wall_0 = enfold(dt, fold=0)
|
||||
wall_1 = enfold(dt, fold=1)
|
||||
|
||||
same_offset = wall_0.utcoffset() == wall_1.utcoffset()
|
||||
same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None)
|
||||
|
||||
return same_dt and not same_offset
|
||||
|
||||
def _fold_status(self, dt_utc, dt_wall):
|
||||
"""
|
||||
Determine the fold status of a "wall" datetime, given a representation
|
||||
of the same datetime as a (naive) UTC datetime. This is calculated based
|
||||
on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all
|
||||
datetimes, and that this offset is the actual number of hours separating
|
||||
``dt_utc`` and ``dt_wall``.
|
||||
|
||||
:param dt_utc:
|
||||
Representation of the datetime as UTC
|
||||
|
||||
:param dt_wall:
|
||||
Representation of the datetime as "wall time". This parameter must
|
||||
either have a `fold` attribute or have a fold-naive
|
||||
:class:`datetime.tzinfo` attached, otherwise the calculation may
|
||||
fail.
|
||||
"""
|
||||
if self.is_ambiguous(dt_wall):
|
||||
delta_wall = dt_wall - dt_utc
|
||||
_fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst()))
|
||||
else:
|
||||
_fold = 0
|
||||
|
||||
return _fold
|
||||
|
||||
def _fold(self, dt):
|
||||
return getattr(dt, 'fold', 0)
|
||||
|
||||
def _fromutc(self, dt):
|
||||
"""
|
||||
Given a timezone-aware datetime in a given timezone, calculates a
|
||||
timezone-aware datetime in a new timezone.
|
||||
|
||||
Since this is the one time that we *know* we have an unambiguous
|
||||
datetime object, we take this opportunity to determine whether the
|
||||
datetime is ambiguous and in a "fold" state (e.g. if it's the first
|
||||
occurence, chronologically, of the ambiguous datetime).
|
||||
|
||||
:param dt:
|
||||
A timezone-aware :class:`datetime.datetime` object.
|
||||
"""
|
||||
|
||||
# Re-implement the algorithm from Python's datetime.py
|
||||
dtoff = dt.utcoffset()
|
||||
if dtoff is None:
|
||||
raise ValueError("fromutc() requires a non-None utcoffset() "
|
||||
"result")
|
||||
|
||||
# The original datetime.py code assumes that `dst()` defaults to
|
||||
# zero during ambiguous times. PEP 495 inverts this presumption, so
|
||||
# for pre-PEP 495 versions of python, we need to tweak the algorithm.
|
||||
dtdst = dt.dst()
|
||||
if dtdst is None:
|
||||
raise ValueError("fromutc() requires a non-None dst() result")
|
||||
delta = dtoff - dtdst
|
||||
|
||||
dt += delta
|
||||
# Set fold=1 so we can default to being in the fold for
|
||||
# ambiguous dates.
|
||||
dtdst = enfold(dt, fold=1).dst()
|
||||
if dtdst is None:
|
||||
raise ValueError("fromutc(): dt.dst gave inconsistent "
|
||||
"results; cannot convert")
|
||||
return dt + dtdst
|
||||
|
||||
@_validate_fromutc_inputs
|
||||
def fromutc(self, dt):
|
||||
"""
|
||||
Given a timezone-aware datetime in a given timezone, calculates a
|
||||
timezone-aware datetime in a new timezone.
|
||||
|
||||
Since this is the one time that we *know* we have an unambiguous
|
||||
datetime object, we take this opportunity to determine whether the
|
||||
datetime is ambiguous and in a "fold" state (e.g. if it's the first
|
||||
occurance, chronologically, of the ambiguous datetime).
|
||||
|
||||
:param dt:
|
||||
A timezone-aware :class:`datetime.datetime` object.
|
||||
"""
|
||||
dt_wall = self._fromutc(dt)
|
||||
|
||||
# Calculate the fold status given the two datetimes.
|
||||
_fold = self._fold_status(dt, dt_wall)
|
||||
|
||||
# Set the default fold value for ambiguous dates
|
||||
return enfold(dt_wall, fold=_fold)
|
||||
|
||||
|
||||
class tzrangebase(_tzinfo):
|
||||
"""
|
||||
This is an abstract base class for time zones represented by an annual
|
||||
transition into and out of DST. Child classes should implement the following
|
||||
methods:
|
||||
|
||||
* ``__init__(self, *args, **kwargs)``
|
||||
* ``transitions(self, year)`` - this is expected to return a tuple of
|
||||
datetimes representing the DST on and off transitions in standard
|
||||
time.
|
||||
|
||||
A fully initialized ``tzrangebase`` subclass should also provide the
|
||||
following attributes:
|
||||
* ``hasdst``: Boolean whether or not the zone uses DST.
|
||||
* ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects
|
||||
representing the respective UTC offsets.
|
||||
* ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short
|
||||
abbreviations in DST and STD, respectively.
|
||||
* ``_hasdst``: Whether or not the zone has DST.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
def __init__(self):
|
||||
raise NotImplementedError('tzrangebase is an abstract base class')
|
||||
|
||||
def utcoffset(self, dt):
|
||||
isdst = self._isdst(dt)
|
||||
|
||||
if isdst is None:
|
||||
return None
|
||||
elif isdst:
|
||||
return self._dst_offset
|
||||
else:
|
||||
return self._std_offset
|
||||
|
||||
def dst(self, dt):
|
||||
isdst = self._isdst(dt)
|
||||
|
||||
if isdst is None:
|
||||
return None
|
||||
elif isdst:
|
||||
return self._dst_base_offset
|
||||
else:
|
||||
return ZERO
|
||||
|
||||
@tzname_in_python2
|
||||
def tzname(self, dt):
|
||||
if self._isdst(dt):
|
||||
return self._dst_abbr
|
||||
else:
|
||||
return self._std_abbr
|
||||
|
||||
def fromutc(self, dt):
|
||||
""" Given a datetime in UTC, return local time """
|
||||
if not isinstance(dt, datetime):
|
||||
raise TypeError("fromutc() requires a datetime argument")
|
||||
|
||||
if dt.tzinfo is not self:
|
||||
raise ValueError("dt.tzinfo is not self")
|
||||
|
||||
# Get transitions - if there are none, fixed offset
|
||||
transitions = self.transitions(dt.year)
|
||||
if transitions is None:
|
||||
return dt + self.utcoffset(dt)
|
||||
|
||||
# Get the transition times in UTC
|
||||
dston, dstoff = transitions
|
||||
|
||||
dston -= self._std_offset
|
||||
dstoff -= self._std_offset
|
||||
|
||||
utc_transitions = (dston, dstoff)
|
||||
dt_utc = dt.replace(tzinfo=None)
|
||||
|
||||
isdst = self._naive_isdst(dt_utc, utc_transitions)
|
||||
|
||||
if isdst:
|
||||
dt_wall = dt + self._dst_offset
|
||||
else:
|
||||
dt_wall = dt + self._std_offset
|
||||
|
||||
_fold = int(not isdst and self.is_ambiguous(dt_wall))
|
||||
|
||||
return enfold(dt_wall, fold=_fold)
|
||||
|
||||
def is_ambiguous(self, dt):
|
||||
"""
|
||||
Whether or not the "wall time" of a given datetime is ambiguous in this
|
||||
zone.
|
||||
|
||||
:param dt:
|
||||
A :py:class:`datetime.datetime`, naive or time zone aware.
|
||||
|
||||
|
||||
:return:
|
||||
Returns ``True`` if ambiguous, ``False`` otherwise.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
"""
|
||||
if not self.hasdst:
|
||||
return False
|
||||
|
||||
start, end = self.transitions(dt.year)
|
||||
|
||||
dt = dt.replace(tzinfo=None)
|
||||
return (end <= dt < end + self._dst_base_offset)
|
||||
|
||||
def _isdst(self, dt):
|
||||
if not self.hasdst:
|
||||
return False
|
||||
elif dt is None:
|
||||
return None
|
||||
|
||||
transitions = self.transitions(dt.year)
|
||||
|
||||
if transitions is None:
|
||||
return False
|
||||
|
||||
dt = dt.replace(tzinfo=None)
|
||||
|
||||
isdst = self._naive_isdst(dt, transitions)
|
||||
|
||||
# Handle ambiguous dates
|
||||
if not isdst and self.is_ambiguous(dt):
|
||||
return not self._fold(dt)
|
||||
else:
|
||||
return isdst
|
||||
|
||||
def _naive_isdst(self, dt, transitions):
|
||||
dston, dstoff = transitions
|
||||
|
||||
dt = dt.replace(tzinfo=None)
|
||||
|
||||
if dston < dstoff:
|
||||
isdst = dston <= dt < dstoff
|
||||
else:
|
||||
isdst = not dstoff <= dt < dston
|
||||
|
||||
return isdst
|
||||
|
||||
@property
|
||||
def _dst_base_offset(self):
|
||||
return self._dst_offset - self._std_offset
|
||||
|
||||
__hash__ = None
|
||||
|
||||
def __ne__(self, other):
|
||||
return not (self == other)
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(...)" % self.__class__.__name__
|
||||
|
||||
__reduce__ = object.__reduce__
|
||||
|
||||
|
||||
def _total_seconds(td):
|
||||
# Python 2.6 doesn't have a total_seconds() method on timedelta objects
|
||||
return ((td.seconds + td.days * 86400) * 1000000 +
|
||||
td.microseconds) // 1000000
|
||||
|
||||
|
||||
_total_seconds = getattr(timedelta, 'total_seconds', _total_seconds)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,332 +0,0 @@
|
||||
# This code was originally contributed by Jeffrey Harris.
|
||||
import datetime
|
||||
import struct
|
||||
|
||||
from six.moves import winreg
|
||||
from six import text_type
|
||||
|
||||
try:
|
||||
import ctypes
|
||||
from ctypes import wintypes
|
||||
except ValueError:
|
||||
# ValueError is raised on non-Windows systems for some horrible reason.
|
||||
raise ImportError("Running tzwin on non-Windows system")
|
||||
|
||||
from ._common import tzrangebase
|
||||
|
||||
__all__ = ["tzwin", "tzwinlocal", "tzres"]
|
||||
|
||||
ONEWEEK = datetime.timedelta(7)
|
||||
|
||||
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
|
||||
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
|
||||
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
|
||||
|
||||
|
||||
def _settzkeyname():
|
||||
handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
|
||||
try:
|
||||
winreg.OpenKey(handle, TZKEYNAMENT).Close()
|
||||
TZKEYNAME = TZKEYNAMENT
|
||||
except WindowsError:
|
||||
TZKEYNAME = TZKEYNAME9X
|
||||
handle.Close()
|
||||
return TZKEYNAME
|
||||
|
||||
|
||||
TZKEYNAME = _settzkeyname()
|
||||
|
||||
|
||||
class tzres(object):
|
||||
"""
|
||||
Class for accessing `tzres.dll`, which contains timezone name related
|
||||
resources.
|
||||
|
||||
.. versionadded:: 2.5.0
|
||||
"""
|
||||
p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char
|
||||
|
||||
def __init__(self, tzres_loc='tzres.dll'):
|
||||
# Load the user32 DLL so we can load strings from tzres
|
||||
user32 = ctypes.WinDLL('user32')
|
||||
|
||||
# Specify the LoadStringW function
|
||||
user32.LoadStringW.argtypes = (wintypes.HINSTANCE,
|
||||
wintypes.UINT,
|
||||
wintypes.LPWSTR,
|
||||
ctypes.c_int)
|
||||
|
||||
self.LoadStringW = user32.LoadStringW
|
||||
self._tzres = ctypes.WinDLL(tzres_loc)
|
||||
self.tzres_loc = tzres_loc
|
||||
|
||||
def load_name(self, offset):
|
||||
"""
|
||||
Load a timezone name from a DLL offset (integer).
|
||||
|
||||
>>> from dateutil.tzwin import tzres
|
||||
>>> tzr = tzres()
|
||||
>>> print(tzr.load_name(112))
|
||||
'Eastern Standard Time'
|
||||
|
||||
:param offset:
|
||||
A positive integer value referring to a string from the tzres dll.
|
||||
|
||||
..note:
|
||||
Offsets found in the registry are generally of the form
|
||||
`@tzres.dll,-114`. The offset in this case if 114, not -114.
|
||||
|
||||
"""
|
||||
resource = self.p_wchar()
|
||||
lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR)
|
||||
nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0)
|
||||
return resource[:nchar]
|
||||
|
||||
def name_from_string(self, tzname_str):
|
||||
"""
|
||||
Parse strings as returned from the Windows registry into the time zone
|
||||
name as defined in the registry.
|
||||
|
||||
>>> from dateutil.tzwin import tzres
|
||||
>>> tzr = tzres()
|
||||
>>> print(tzr.name_from_string('@tzres.dll,-251'))
|
||||
'Dateline Daylight Time'
|
||||
>>> print(tzr.name_from_string('Eastern Standard Time'))
|
||||
'Eastern Standard Time'
|
||||
|
||||
:param tzname_str:
|
||||
A timezone name string as returned from a Windows registry key.
|
||||
|
||||
:return:
|
||||
Returns the localized timezone string from tzres.dll if the string
|
||||
is of the form `@tzres.dll,-offset`, else returns the input string.
|
||||
"""
|
||||
if not tzname_str.startswith('@'):
|
||||
return tzname_str
|
||||
|
||||
name_splt = tzname_str.split(',-')
|
||||
try:
|
||||
offset = int(name_splt[1])
|
||||
except:
|
||||
raise ValueError("Malformed timezone string.")
|
||||
|
||||
return self.load_name(offset)
|
||||
|
||||
|
||||
class tzwinbase(tzrangebase):
|
||||
"""tzinfo class based on win32's timezones available in the registry."""
|
||||
def __init__(self):
|
||||
raise NotImplementedError('tzwinbase is an abstract base class')
|
||||
|
||||
def __eq__(self, other):
|
||||
# Compare on all relevant dimensions, including name.
|
||||
if not isinstance(other, tzwinbase):
|
||||
return NotImplemented
|
||||
|
||||
return (self._std_offset == other._std_offset and
|
||||
self._dst_offset == other._dst_offset and
|
||||
self._stddayofweek == other._stddayofweek and
|
||||
self._dstdayofweek == other._dstdayofweek and
|
||||
self._stdweeknumber == other._stdweeknumber and
|
||||
self._dstweeknumber == other._dstweeknumber and
|
||||
self._stdhour == other._stdhour and
|
||||
self._dsthour == other._dsthour and
|
||||
self._stdminute == other._stdminute and
|
||||
self._dstminute == other._dstminute and
|
||||
self._std_abbr == other._std_abbr and
|
||||
self._dst_abbr == other._dst_abbr)
|
||||
|
||||
@staticmethod
|
||||
def list():
|
||||
"""Return a list of all time zones known to the system."""
|
||||
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
|
||||
with winreg.OpenKey(handle, TZKEYNAME) as tzkey:
|
||||
result = [winreg.EnumKey(tzkey, i)
|
||||
for i in range(winreg.QueryInfoKey(tzkey)[0])]
|
||||
return result
|
||||
|
||||
def display(self):
|
||||
return self._display
|
||||
|
||||
def transitions(self, year):
|
||||
"""
|
||||
For a given year, get the DST on and off transition times, expressed
|
||||
always on the standard time side. For zones with no transitions, this
|
||||
function returns ``None``.
|
||||
|
||||
:param year:
|
||||
The year whose transitions you would like to query.
|
||||
|
||||
:return:
|
||||
Returns a :class:`tuple` of :class:`datetime.datetime` objects,
|
||||
``(dston, dstoff)`` for zones with an annual DST transition, or
|
||||
``None`` for fixed offset zones.
|
||||
"""
|
||||
|
||||
if not self.hasdst:
|
||||
return None
|
||||
|
||||
dston = picknthweekday(year, self._dstmonth, self._dstdayofweek,
|
||||
self._dsthour, self._dstminute,
|
||||
self._dstweeknumber)
|
||||
|
||||
dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek,
|
||||
self._stdhour, self._stdminute,
|
||||
self._stdweeknumber)
|
||||
|
||||
# Ambiguous dates default to the STD side
|
||||
dstoff -= self._dst_base_offset
|
||||
|
||||
return dston, dstoff
|
||||
|
||||
def _get_hasdst(self):
|
||||
return self._dstmonth != 0
|
||||
|
||||
@property
|
||||
def _dst_base_offset(self):
|
||||
return self._dst_base_offset_
|
||||
|
||||
|
||||
class tzwin(tzwinbase):
|
||||
|
||||
def __init__(self, name):
|
||||
self._name = name
|
||||
|
||||
# multiple contexts only possible in 2.7 and 3.1, we still support 2.6
|
||||
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
|
||||
tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name)
|
||||
with winreg.OpenKey(handle, tzkeyname) as tzkey:
|
||||
keydict = valuestodict(tzkey)
|
||||
|
||||
self._std_abbr = keydict["Std"]
|
||||
self._dst_abbr = keydict["Dlt"]
|
||||
|
||||
self._display = keydict["Display"]
|
||||
|
||||
# See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
|
||||
tup = struct.unpack("=3l16h", keydict["TZI"])
|
||||
stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1
|
||||
dstoffset = stdoffset-tup[2] # + DaylightBias * -1
|
||||
self._std_offset = datetime.timedelta(minutes=stdoffset)
|
||||
self._dst_offset = datetime.timedelta(minutes=dstoffset)
|
||||
|
||||
# for the meaning see the win32 TIME_ZONE_INFORMATION structure docs
|
||||
# http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx
|
||||
(self._stdmonth,
|
||||
self._stddayofweek, # Sunday = 0
|
||||
self._stdweeknumber, # Last = 5
|
||||
self._stdhour,
|
||||
self._stdminute) = tup[4:9]
|
||||
|
||||
(self._dstmonth,
|
||||
self._dstdayofweek, # Sunday = 0
|
||||
self._dstweeknumber, # Last = 5
|
||||
self._dsthour,
|
||||
self._dstminute) = tup[12:17]
|
||||
|
||||
self._dst_base_offset_ = self._dst_offset - self._std_offset
|
||||
self.hasdst = self._get_hasdst()
|
||||
|
||||
def __repr__(self):
|
||||
return "tzwin(%s)" % repr(self._name)
|
||||
|
||||
def __reduce__(self):
|
||||
return (self.__class__, (self._name,))
|
||||
|
||||
|
||||
class tzwinlocal(tzwinbase):
|
||||
def __init__(self):
|
||||
with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle:
|
||||
with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey:
|
||||
keydict = valuestodict(tzlocalkey)
|
||||
|
||||
self._std_abbr = keydict["StandardName"]
|
||||
self._dst_abbr = keydict["DaylightName"]
|
||||
|
||||
try:
|
||||
tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME,
|
||||
sn=self._std_abbr)
|
||||
with winreg.OpenKey(handle, tzkeyname) as tzkey:
|
||||
_keydict = valuestodict(tzkey)
|
||||
self._display = _keydict["Display"]
|
||||
except OSError:
|
||||
self._display = None
|
||||
|
||||
stdoffset = -keydict["Bias"]-keydict["StandardBias"]
|
||||
dstoffset = stdoffset-keydict["DaylightBias"]
|
||||
|
||||
self._std_offset = datetime.timedelta(minutes=stdoffset)
|
||||
self._dst_offset = datetime.timedelta(minutes=dstoffset)
|
||||
|
||||
# For reasons unclear, in this particular key, the day of week has been
|
||||
# moved to the END of the SYSTEMTIME structure.
|
||||
tup = struct.unpack("=8h", keydict["StandardStart"])
|
||||
|
||||
(self._stdmonth,
|
||||
self._stdweeknumber, # Last = 5
|
||||
self._stdhour,
|
||||
self._stdminute) = tup[1:5]
|
||||
|
||||
self._stddayofweek = tup[7]
|
||||
|
||||
tup = struct.unpack("=8h", keydict["DaylightStart"])
|
||||
|
||||
(self._dstmonth,
|
||||
self._dstweeknumber, # Last = 5
|
||||
self._dsthour,
|
||||
self._dstminute) = tup[1:5]
|
||||
|
||||
self._dstdayofweek = tup[7]
|
||||
|
||||
self._dst_base_offset_ = self._dst_offset - self._std_offset
|
||||
self.hasdst = self._get_hasdst()
|
||||
|
||||
def __repr__(self):
|
||||
return "tzwinlocal()"
|
||||
|
||||
def __str__(self):
|
||||
# str will return the standard name, not the daylight name.
|
||||
return "tzwinlocal(%s)" % repr(self._std_abbr)
|
||||
|
||||
def __reduce__(self):
|
||||
return (self.__class__, ())
|
||||
|
||||
|
||||
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
|
||||
""" dayofweek == 0 means Sunday, whichweek 5 means last instance """
|
||||
first = datetime.datetime(year, month, 1, hour, minute)
|
||||
|
||||
# This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6),
|
||||
# Because 7 % 7 = 0
|
||||
weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1)
|
||||
wd = weekdayone + ((whichweek - 1) * ONEWEEK)
|
||||
if (wd.month != month):
|
||||
wd -= ONEWEEK
|
||||
|
||||
return wd
|
||||
|
||||
|
||||
def valuestodict(key):
|
||||
"""Convert a registry key's values to a dictionary."""
|
||||
dout = {}
|
||||
size = winreg.QueryInfoKey(key)[1]
|
||||
tz_res = None
|
||||
|
||||
for i in range(size):
|
||||
key_name, value, dtype = winreg.EnumValue(key, i)
|
||||
if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN:
|
||||
# If it's a DWORD (32-bit integer), it's stored as unsigned - convert
|
||||
# that to a proper signed integer
|
||||
if value & (1 << 31):
|
||||
value = value - (1 << 32)
|
||||
elif dtype == winreg.REG_SZ:
|
||||
# If it's a reference to the tzres DLL, load the actual string
|
||||
if value.startswith('@tzres'):
|
||||
tz_res = tz_res or tzres()
|
||||
value = tz_res.name_from_string(value)
|
||||
|
||||
value = value.rstrip('\x00') # Remove trailing nulls
|
||||
|
||||
dout[key_name] = value
|
||||
|
||||
return dout
|
||||
@@ -1,2 +0,0 @@
|
||||
# tzwin has moved to dateutil.tz.win
|
||||
from .tz.win import *
|
||||
@@ -1,183 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import warnings
|
||||
import json
|
||||
|
||||
from tarfile import TarFile
|
||||
from pkgutil import get_data
|
||||
from io import BytesIO
|
||||
from contextlib import closing
|
||||
|
||||
from dateutil.tz import tzfile
|
||||
|
||||
__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata", "rebuild"]
|
||||
|
||||
ZONEFILENAME = "dateutil-zoneinfo.tar.gz"
|
||||
METADATA_FN = 'METADATA'
|
||||
|
||||
# python2.6 compatability. Note that TarFile.__exit__ != TarFile.close, but
|
||||
# it's close enough for python2.6
|
||||
tar_open = TarFile.open
|
||||
if not hasattr(TarFile, '__exit__'):
|
||||
def tar_open(*args, **kwargs):
|
||||
return closing(TarFile.open(*args, **kwargs))
|
||||
|
||||
|
||||
class tzfile(tzfile):
|
||||
def __reduce__(self):
|
||||
return (gettz, (self._filename,))
|
||||
|
||||
|
||||
def getzoneinfofile_stream():
|
||||
try:
|
||||
return BytesIO(get_data(__name__, ZONEFILENAME))
|
||||
except IOError as e: # TODO switch to FileNotFoundError?
|
||||
warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror))
|
||||
return None
|
||||
|
||||
|
||||
class ZoneInfoFile(object):
|
||||
def __init__(self, zonefile_stream=None):
|
||||
if zonefile_stream is not None:
|
||||
with tar_open(fileobj=zonefile_stream, mode='r') as tf:
|
||||
# dict comprehension does not work on python2.6
|
||||
# TODO: get back to the nicer syntax when we ditch python2.6
|
||||
# self.zones = {zf.name: tzfile(tf.extractfile(zf),
|
||||
# filename = zf.name)
|
||||
# for zf in tf.getmembers() if zf.isfile()}
|
||||
self.zones = dict((zf.name, tzfile(tf.extractfile(zf),
|
||||
filename=zf.name))
|
||||
for zf in tf.getmembers()
|
||||
if zf.isfile() and zf.name != METADATA_FN)
|
||||
# deal with links: They'll point to their parent object. Less
|
||||
# waste of memory
|
||||
# links = {zl.name: self.zones[zl.linkname]
|
||||
# for zl in tf.getmembers() if zl.islnk() or zl.issym()}
|
||||
links = dict((zl.name, self.zones[zl.linkname])
|
||||
for zl in tf.getmembers() if
|
||||
zl.islnk() or zl.issym())
|
||||
self.zones.update(links)
|
||||
try:
|
||||
metadata_json = tf.extractfile(tf.getmember(METADATA_FN))
|
||||
metadata_str = metadata_json.read().decode('UTF-8')
|
||||
self.metadata = json.loads(metadata_str)
|
||||
except KeyError:
|
||||
# no metadata in tar file
|
||||
self.metadata = None
|
||||
else:
|
||||
self.zones = dict()
|
||||
self.metadata = None
|
||||
|
||||
def get(self, name, default=None):
|
||||
"""
|
||||
Wrapper for :func:`ZoneInfoFile.zones.get`. This is a convenience method
|
||||
for retrieving zones from the zone dictionary.
|
||||
|
||||
:param name:
|
||||
The name of the zone to retrieve. (Generally IANA zone names)
|
||||
|
||||
:param default:
|
||||
The value to return in the event of a missing key.
|
||||
|
||||
.. versionadded:: 2.6.0
|
||||
|
||||
"""
|
||||
return self.zones.get(name, default)
|
||||
|
||||
|
||||
# The current API has gettz as a module function, although in fact it taps into
|
||||
# a stateful class. So as a workaround for now, without changing the API, we
|
||||
# will create a new "global" class instance the first time a user requests a
|
||||
# timezone. Ugly, but adheres to the api.
|
||||
#
|
||||
# TODO: Remove after deprecation period.
|
||||
_CLASS_ZONE_INSTANCE = list()
|
||||
|
||||
|
||||
def get_zonefile_instance(new_instance=False):
|
||||
"""
|
||||
This is a convenience function which provides a :class:`ZoneInfoFile`
|
||||
instance using the data provided by the ``dateutil`` package. By default, it
|
||||
caches a single instance of the ZoneInfoFile object and returns that.
|
||||
|
||||
:param new_instance:
|
||||
If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and
|
||||
used as the cached instance for the next call. Otherwise, new instances
|
||||
are created only as necessary.
|
||||
|
||||
:return:
|
||||
Returns a :class:`ZoneInfoFile` object.
|
||||
|
||||
.. versionadded:: 2.6
|
||||
"""
|
||||
if new_instance:
|
||||
zif = None
|
||||
else:
|
||||
zif = getattr(get_zonefile_instance, '_cached_instance', None)
|
||||
|
||||
if zif is None:
|
||||
zif = ZoneInfoFile(getzoneinfofile_stream())
|
||||
|
||||
get_zonefile_instance._cached_instance = zif
|
||||
|
||||
return zif
|
||||
|
||||
|
||||
def gettz(name):
|
||||
"""
|
||||
This retrieves a time zone from the local zoneinfo tarball that is packaged
|
||||
with dateutil.
|
||||
|
||||
:param name:
|
||||
An IANA-style time zone name, as found in the zoneinfo file.
|
||||
|
||||
:return:
|
||||
Returns a :class:`dateutil.tz.tzfile` time zone object.
|
||||
|
||||
.. warning::
|
||||
It is generally inadvisable to use this function, and it is only
|
||||
provided for API compatibility with earlier versions. This is *not*
|
||||
equivalent to ``dateutil.tz.gettz()``, which selects an appropriate
|
||||
time zone based on the inputs, favoring system zoneinfo. This is ONLY
|
||||
for accessing the dateutil-specific zoneinfo (which may be out of
|
||||
date compared to the system zoneinfo).
|
||||
|
||||
.. deprecated:: 2.6
|
||||
If you need to use a specific zoneinfofile over the system zoneinfo,
|
||||
instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call
|
||||
:func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead.
|
||||
|
||||
Use :func:`get_zonefile_instance` to retrieve an instance of the
|
||||
dateutil-provided zoneinfo.
|
||||
"""
|
||||
warnings.warn("zoneinfo.gettz() will be removed in future versions, "
|
||||
"to use the dateutil-provided zoneinfo files, instantiate a "
|
||||
"ZoneInfoFile object and use ZoneInfoFile.zones.get() "
|
||||
"instead. See the documentation for details.",
|
||||
DeprecationWarning)
|
||||
|
||||
if len(_CLASS_ZONE_INSTANCE) == 0:
|
||||
_CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
|
||||
return _CLASS_ZONE_INSTANCE[0].zones.get(name)
|
||||
|
||||
|
||||
def gettz_db_metadata():
|
||||
""" Get the zonefile metadata
|
||||
|
||||
See `zonefile_metadata`_
|
||||
|
||||
:returns:
|
||||
A dictionary with the database metadata
|
||||
|
||||
.. deprecated:: 2.6
|
||||
See deprecation warning in :func:`zoneinfo.gettz`. To get metadata,
|
||||
query the attribute ``zoneinfo.ZoneInfoFile.metadata``.
|
||||
"""
|
||||
warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future "
|
||||
"versions, to use the dateutil-provided zoneinfo files, "
|
||||
"ZoneInfoFile object and query the 'metadata' attribute "
|
||||
"instead. See the documentation for details.",
|
||||
DeprecationWarning)
|
||||
|
||||
if len(_CLASS_ZONE_INSTANCE) == 0:
|
||||
_CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream()))
|
||||
return _CLASS_ZONE_INSTANCE[0].metadata
|
||||
Binary file not shown.
@@ -1,52 +0,0 @@
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import shutil
|
||||
import json
|
||||
from subprocess import check_call
|
||||
|
||||
from dateutil.zoneinfo import tar_open, METADATA_FN, ZONEFILENAME
|
||||
|
||||
|
||||
def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None):
|
||||
"""Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar*
|
||||
|
||||
filename is the timezone tarball from ftp.iana.org/tz.
|
||||
|
||||
"""
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
zonedir = os.path.join(tmpdir, "zoneinfo")
|
||||
moduledir = os.path.dirname(__file__)
|
||||
try:
|
||||
with tar_open(filename) as tf:
|
||||
for name in zonegroups:
|
||||
tf.extract(name, tmpdir)
|
||||
filepaths = [os.path.join(tmpdir, n) for n in zonegroups]
|
||||
try:
|
||||
check_call(["zic", "-d", zonedir] + filepaths)
|
||||
except OSError as e:
|
||||
_print_on_nosuchfile(e)
|
||||
raise
|
||||
# write metadata file
|
||||
with open(os.path.join(zonedir, METADATA_FN), 'w') as f:
|
||||
json.dump(metadata, f, indent=4, sort_keys=True)
|
||||
target = os.path.join(moduledir, ZONEFILENAME)
|
||||
with tar_open(target, "w:%s" % format) as tf:
|
||||
for entry in os.listdir(zonedir):
|
||||
entrypath = os.path.join(zonedir, entry)
|
||||
tf.add(entrypath, entry)
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
def _print_on_nosuchfile(e):
|
||||
"""Print helpful troubleshooting message
|
||||
|
||||
e is an exception raised by subprocess.check_call()
|
||||
|
||||
"""
|
||||
if e.errno == 2:
|
||||
logging.error(
|
||||
"Could not find zic. Perhaps you need to install "
|
||||
"libc-bin or some other package that provides it, "
|
||||
"or it's not in your PATH?")
|
||||
@@ -1,112 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defused xml.etree.ElementTree facade
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
import sys
|
||||
from xml.etree.ElementTree import TreeBuilder as _TreeBuilder
|
||||
from xml.etree.ElementTree import parse as _parse
|
||||
from xml.etree.ElementTree import tostring
|
||||
|
||||
from .common import PY3
|
||||
|
||||
|
||||
if PY3:
|
||||
import importlib
|
||||
else:
|
||||
from xml.etree.ElementTree import XMLParser as _XMLParser
|
||||
from xml.etree.ElementTree import iterparse as _iterparse
|
||||
from xml.etree.ElementTree import ParseError
|
||||
|
||||
|
||||
from .common import (DTDForbidden, EntitiesForbidden,
|
||||
ExternalReferenceForbidden, _generate_etree_functions)
|
||||
|
||||
__origin__ = "xml.etree.ElementTree"
|
||||
|
||||
|
||||
def _get_py3_cls():
|
||||
"""Python 3.3 hides the pure Python code but defusedxml requires it.
|
||||
|
||||
The code is based on test.support.import_fresh_module().
|
||||
"""
|
||||
pymodname = "xml.etree.ElementTree"
|
||||
cmodname = "_elementtree"
|
||||
|
||||
pymod = sys.modules.pop(pymodname, None)
|
||||
cmod = sys.modules.pop(cmodname, None)
|
||||
|
||||
sys.modules[cmodname] = None
|
||||
pure_pymod = importlib.import_module(pymodname)
|
||||
if cmod is not None:
|
||||
sys.modules[cmodname] = cmod
|
||||
else:
|
||||
sys.modules.pop(cmodname)
|
||||
sys.modules[pymodname] = pymod
|
||||
|
||||
_XMLParser = pure_pymod.XMLParser
|
||||
_iterparse = pure_pymod.iterparse
|
||||
ParseError = pure_pymod.ParseError
|
||||
|
||||
return _XMLParser, _iterparse, ParseError
|
||||
|
||||
|
||||
if PY3:
|
||||
_XMLParser, _iterparse, ParseError = _get_py3_cls()
|
||||
|
||||
|
||||
class DefusedXMLParser(_XMLParser):
|
||||
|
||||
def __init__(self, html=0, target=None, encoding=None,
|
||||
forbid_dtd=False, forbid_entities=True,
|
||||
forbid_external=True):
|
||||
# Python 2.x old style class
|
||||
_XMLParser.__init__(self, html, target, encoding)
|
||||
self.forbid_dtd = forbid_dtd
|
||||
self.forbid_entities = forbid_entities
|
||||
self.forbid_external = forbid_external
|
||||
if PY3:
|
||||
parser = self.parser
|
||||
else:
|
||||
parser = self._parser
|
||||
if self.forbid_dtd:
|
||||
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
|
||||
if self.forbid_entities:
|
||||
parser.EntityDeclHandler = self.defused_entity_decl
|
||||
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
|
||||
if self.forbid_external:
|
||||
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
|
||||
|
||||
def defused_start_doctype_decl(self, name, sysid, pubid,
|
||||
has_internal_subset):
|
||||
raise DTDForbidden(name, sysid, pubid)
|
||||
|
||||
def defused_entity_decl(self, name, is_parameter_entity, value, base,
|
||||
sysid, pubid, notation_name):
|
||||
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
|
||||
|
||||
def defused_unparsed_entity_decl(self, name, base, sysid, pubid,
|
||||
notation_name):
|
||||
# expat 1.2
|
||||
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
|
||||
|
||||
def defused_external_entity_ref_handler(self, context, base, sysid,
|
||||
pubid):
|
||||
raise ExternalReferenceForbidden(context, base, sysid, pubid)
|
||||
|
||||
|
||||
# aliases
|
||||
XMLTreeBuilder = XMLParse = DefusedXMLParser
|
||||
|
||||
parse, iterparse, fromstring = _generate_etree_functions(DefusedXMLParser,
|
||||
_TreeBuilder, _parse,
|
||||
_iterparse)
|
||||
XML = fromstring
|
||||
|
||||
|
||||
__all__ = ['XML', 'XMLParse', 'XMLTreeBuilder', 'fromstring', 'iterparse',
|
||||
'parse', 'tostring']
|
||||
@@ -1,45 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defuse XML bomb denial of service vulnerabilities
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
from .common import (DefusedXmlException, DTDForbidden, EntitiesForbidden,
|
||||
ExternalReferenceForbidden, NotSupportedError,
|
||||
_apply_defusing)
|
||||
|
||||
|
||||
def defuse_stdlib():
|
||||
"""Monkey patch and defuse all stdlib packages
|
||||
|
||||
:warning: The monkey patch is an EXPERIMETNAL feature.
|
||||
"""
|
||||
defused = {}
|
||||
|
||||
from . import cElementTree
|
||||
from . import ElementTree
|
||||
from . import minidom
|
||||
from . import pulldom
|
||||
from . import sax
|
||||
from . import expatbuilder
|
||||
from . import expatreader
|
||||
from . import xmlrpc
|
||||
|
||||
xmlrpc.monkey_patch()
|
||||
defused[xmlrpc] = None
|
||||
|
||||
for defused_mod in [cElementTree, ElementTree, minidom, pulldom, sax,
|
||||
expatbuilder, expatreader]:
|
||||
stdlib_mod = _apply_defusing(defused_mod)
|
||||
defused[defused_mod] = stdlib_mod
|
||||
|
||||
return defused
|
||||
|
||||
|
||||
__version__ = "0.5.0"
|
||||
|
||||
__all__ = ['DefusedXmlException', 'DTDForbidden', 'EntitiesForbidden',
|
||||
'ExternalReferenceForbidden', 'NotSupportedError']
|
||||
@@ -1,30 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defused xml.etree.cElementTree
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
from xml.etree.cElementTree import TreeBuilder as _TreeBuilder
|
||||
from xml.etree.cElementTree import parse as _parse
|
||||
from xml.etree.cElementTree import tostring
|
||||
# iterparse from ElementTree!
|
||||
from xml.etree.ElementTree import iterparse as _iterparse
|
||||
|
||||
from .ElementTree import DefusedXMLParser
|
||||
from .common import _generate_etree_functions
|
||||
|
||||
__origin__ = "xml.etree.cElementTree"
|
||||
|
||||
|
||||
XMLTreeBuilder = XMLParse = DefusedXMLParser
|
||||
|
||||
parse, iterparse, fromstring = _generate_etree_functions(DefusedXMLParser,
|
||||
_TreeBuilder, _parse,
|
||||
_iterparse)
|
||||
XML = fromstring
|
||||
|
||||
__all__ = ['XML', 'XMLParse', 'XMLTreeBuilder', 'fromstring', 'iterparse',
|
||||
'parse', 'tostring']
|
||||
@@ -1,120 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Common constants, exceptions and helpe functions
|
||||
"""
|
||||
import sys
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
|
||||
class DefusedXmlException(ValueError):
|
||||
"""Base exception
|
||||
"""
|
||||
|
||||
def __repr__(self):
|
||||
return str(self)
|
||||
|
||||
|
||||
class DTDForbidden(DefusedXmlException):
|
||||
"""Document type definition is forbidden
|
||||
"""
|
||||
|
||||
def __init__(self, name, sysid, pubid):
|
||||
super(DTDForbidden, self).__init__()
|
||||
self.name = name
|
||||
self.sysid = sysid
|
||||
self.pubid = pubid
|
||||
|
||||
def __str__(self):
|
||||
tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
|
||||
return tpl.format(self.name, self.sysid, self.pubid)
|
||||
|
||||
|
||||
class EntitiesForbidden(DefusedXmlException):
|
||||
"""Entity definition is forbidden
|
||||
"""
|
||||
|
||||
def __init__(self, name, value, base, sysid, pubid, notation_name):
|
||||
super(EntitiesForbidden, self).__init__()
|
||||
self.name = name
|
||||
self.value = value
|
||||
self.base = base
|
||||
self.sysid = sysid
|
||||
self.pubid = pubid
|
||||
self.notation_name = notation_name
|
||||
|
||||
def __str__(self):
|
||||
tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
|
||||
return tpl.format(self.name, self.sysid, self.pubid)
|
||||
|
||||
|
||||
class ExternalReferenceForbidden(DefusedXmlException):
|
||||
"""Resolving an external reference is forbidden
|
||||
"""
|
||||
|
||||
def __init__(self, context, base, sysid, pubid):
|
||||
super(ExternalReferenceForbidden, self).__init__()
|
||||
self.context = context
|
||||
self.base = base
|
||||
self.sysid = sysid
|
||||
self.pubid = pubid
|
||||
|
||||
def __str__(self):
|
||||
tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
|
||||
return tpl.format(self.sysid, self.pubid)
|
||||
|
||||
|
||||
class NotSupportedError(DefusedXmlException):
|
||||
"""The operation is not supported
|
||||
"""
|
||||
|
||||
|
||||
def _apply_defusing(defused_mod):
|
||||
assert defused_mod is sys.modules[defused_mod.__name__]
|
||||
stdlib_name = defused_mod.__origin__
|
||||
__import__(stdlib_name, {}, {}, ["*"])
|
||||
stdlib_mod = sys.modules[stdlib_name]
|
||||
stdlib_names = set(dir(stdlib_mod))
|
||||
for name, obj in vars(defused_mod).items():
|
||||
if name.startswith("_") or name not in stdlib_names:
|
||||
continue
|
||||
setattr(stdlib_mod, name, obj)
|
||||
return stdlib_mod
|
||||
|
||||
|
||||
def _generate_etree_functions(DefusedXMLParser, _TreeBuilder,
|
||||
_parse, _iterparse):
|
||||
"""Factory for functions needed by etree, dependent on whether
|
||||
cElementTree or ElementTree is used."""
|
||||
|
||||
def parse(source, parser=None, forbid_dtd=False, forbid_entities=True,
|
||||
forbid_external=True):
|
||||
if parser is None:
|
||||
parser = DefusedXMLParser(target=_TreeBuilder(),
|
||||
forbid_dtd=forbid_dtd,
|
||||
forbid_entities=forbid_entities,
|
||||
forbid_external=forbid_external)
|
||||
return _parse(source, parser)
|
||||
|
||||
def iterparse(source, events=None, parser=None, forbid_dtd=False,
|
||||
forbid_entities=True, forbid_external=True):
|
||||
if parser is None:
|
||||
parser = DefusedXMLParser(target=_TreeBuilder(),
|
||||
forbid_dtd=forbid_dtd,
|
||||
forbid_entities=forbid_entities,
|
||||
forbid_external=forbid_external)
|
||||
return _iterparse(source, events, parser)
|
||||
|
||||
def fromstring(text, forbid_dtd=False, forbid_entities=True,
|
||||
forbid_external=True):
|
||||
parser = DefusedXMLParser(target=_TreeBuilder(),
|
||||
forbid_dtd=forbid_dtd,
|
||||
forbid_entities=forbid_entities,
|
||||
forbid_external=forbid_external)
|
||||
parser.feed(text)
|
||||
return parser.close()
|
||||
|
||||
return parse, iterparse, fromstring
|
||||
@@ -1,110 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defused xml.dom.expatbuilder
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
from xml.dom.expatbuilder import ExpatBuilder as _ExpatBuilder
|
||||
from xml.dom.expatbuilder import Namespaces as _Namespaces
|
||||
|
||||
from .common import (DTDForbidden, EntitiesForbidden,
|
||||
ExternalReferenceForbidden)
|
||||
|
||||
__origin__ = "xml.dom.expatbuilder"
|
||||
|
||||
|
||||
class DefusedExpatBuilder(_ExpatBuilder):
|
||||
"""Defused document builder"""
|
||||
|
||||
def __init__(self, options=None, forbid_dtd=False, forbid_entities=True,
|
||||
forbid_external=True):
|
||||
_ExpatBuilder.__init__(self, options)
|
||||
self.forbid_dtd = forbid_dtd
|
||||
self.forbid_entities = forbid_entities
|
||||
self.forbid_external = forbid_external
|
||||
|
||||
def defused_start_doctype_decl(self, name, sysid, pubid,
|
||||
has_internal_subset):
|
||||
raise DTDForbidden(name, sysid, pubid)
|
||||
|
||||
def defused_entity_decl(self, name, is_parameter_entity, value, base,
|
||||
sysid, pubid, notation_name):
|
||||
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
|
||||
|
||||
def defused_unparsed_entity_decl(self, name, base, sysid, pubid,
|
||||
notation_name):
|
||||
# expat 1.2
|
||||
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
|
||||
|
||||
def defused_external_entity_ref_handler(self, context, base, sysid,
|
||||
pubid):
|
||||
raise ExternalReferenceForbidden(context, base, sysid, pubid)
|
||||
|
||||
def install(self, parser):
|
||||
_ExpatBuilder.install(self, parser)
|
||||
|
||||
if self.forbid_dtd:
|
||||
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
|
||||
if self.forbid_entities:
|
||||
# if self._options.entities:
|
||||
parser.EntityDeclHandler = self.defused_entity_decl
|
||||
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
|
||||
if self.forbid_external:
|
||||
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
|
||||
|
||||
|
||||
class DefusedExpatBuilderNS(_Namespaces, DefusedExpatBuilder):
|
||||
"""Defused document builder that supports namespaces."""
|
||||
|
||||
def install(self, parser):
|
||||
DefusedExpatBuilder.install(self, parser)
|
||||
if self._options.namespace_declarations:
|
||||
parser.StartNamespaceDeclHandler = (
|
||||
self.start_namespace_decl_handler)
|
||||
|
||||
def reset(self):
|
||||
DefusedExpatBuilder.reset(self)
|
||||
self._initNamespaces()
|
||||
|
||||
|
||||
def parse(file, namespaces=True, forbid_dtd=False, forbid_entities=True,
|
||||
forbid_external=True):
|
||||
"""Parse a document, returning the resulting Document node.
|
||||
|
||||
'file' may be either a file name or an open file object.
|
||||
"""
|
||||
if namespaces:
|
||||
build_builder = DefusedExpatBuilderNS
|
||||
else:
|
||||
build_builder = DefusedExpatBuilder
|
||||
builder = build_builder(forbid_dtd=forbid_dtd,
|
||||
forbid_entities=forbid_entities,
|
||||
forbid_external=forbid_external)
|
||||
|
||||
if isinstance(file, str):
|
||||
fp = open(file, 'rb')
|
||||
try:
|
||||
result = builder.parseFile(fp)
|
||||
finally:
|
||||
fp.close()
|
||||
else:
|
||||
result = builder.parseFile(file)
|
||||
return result
|
||||
|
||||
|
||||
def parseString(string, namespaces=True, forbid_dtd=False,
|
||||
forbid_entities=True, forbid_external=True):
|
||||
"""Parse a document from a string, returning the resulting
|
||||
Document node.
|
||||
"""
|
||||
if namespaces:
|
||||
build_builder = DefusedExpatBuilderNS
|
||||
else:
|
||||
build_builder = DefusedExpatBuilder
|
||||
builder = build_builder(forbid_dtd=forbid_dtd,
|
||||
forbid_entities=forbid_entities,
|
||||
forbid_external=forbid_external)
|
||||
return builder.parseString(string)
|
||||
@@ -1,59 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defused xml.sax.expatreader
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
from xml.sax.expatreader import ExpatParser as _ExpatParser
|
||||
|
||||
from .common import (DTDForbidden, EntitiesForbidden,
|
||||
ExternalReferenceForbidden)
|
||||
|
||||
__origin__ = "xml.sax.expatreader"
|
||||
|
||||
|
||||
class DefusedExpatParser(_ExpatParser):
|
||||
"""Defused SAX driver for the pyexpat C module."""
|
||||
|
||||
def __init__(self, namespaceHandling=0, bufsize=2 ** 16 - 20,
|
||||
forbid_dtd=False, forbid_entities=True,
|
||||
forbid_external=True):
|
||||
_ExpatParser.__init__(self, namespaceHandling, bufsize)
|
||||
self.forbid_dtd = forbid_dtd
|
||||
self.forbid_entities = forbid_entities
|
||||
self.forbid_external = forbid_external
|
||||
|
||||
def defused_start_doctype_decl(self, name, sysid, pubid,
|
||||
has_internal_subset):
|
||||
raise DTDForbidden(name, sysid, pubid)
|
||||
|
||||
def defused_entity_decl(self, name, is_parameter_entity, value, base,
|
||||
sysid, pubid, notation_name):
|
||||
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
|
||||
|
||||
def defused_unparsed_entity_decl(self, name, base, sysid, pubid,
|
||||
notation_name):
|
||||
# expat 1.2
|
||||
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
|
||||
|
||||
def defused_external_entity_ref_handler(self, context, base, sysid,
|
||||
pubid):
|
||||
raise ExternalReferenceForbidden(context, base, sysid, pubid)
|
||||
|
||||
def reset(self):
|
||||
_ExpatParser.reset(self)
|
||||
parser = self._parser
|
||||
if self.forbid_dtd:
|
||||
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
|
||||
if self.forbid_entities:
|
||||
parser.EntityDeclHandler = self.defused_entity_decl
|
||||
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
|
||||
if self.forbid_external:
|
||||
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
|
||||
|
||||
|
||||
def create_parser(*args, **kwargs):
|
||||
return DefusedExpatParser(*args, **kwargs)
|
||||
@@ -1,153 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Example code for lxml.etree protection
|
||||
|
||||
The code has NO protection against decompression bombs.
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
import threading
|
||||
from lxml import etree as _etree
|
||||
|
||||
from .common import DTDForbidden, EntitiesForbidden, NotSupportedError
|
||||
|
||||
LXML3 = _etree.LXML_VERSION[0] >= 3
|
||||
|
||||
__origin__ = "lxml.etree"
|
||||
|
||||
tostring = _etree.tostring
|
||||
|
||||
|
||||
class RestrictedElement(_etree.ElementBase):
|
||||
"""A restricted Element class that filters out instances of some classes
|
||||
"""
|
||||
__slots__ = ()
|
||||
# blacklist = (etree._Entity, etree._ProcessingInstruction, etree._Comment)
|
||||
blacklist = _etree._Entity
|
||||
|
||||
def _filter(self, iterator):
|
||||
blacklist = self.blacklist
|
||||
for child in iterator:
|
||||
if isinstance(child, blacklist):
|
||||
continue
|
||||
yield child
|
||||
|
||||
def __iter__(self):
|
||||
iterator = super(RestrictedElement, self).__iter__()
|
||||
return self._filter(iterator)
|
||||
|
||||
def iterchildren(self, tag=None, reversed=False):
|
||||
iterator = super(RestrictedElement, self).iterchildren(
|
||||
tag=tag, reversed=reversed)
|
||||
return self._filter(iterator)
|
||||
|
||||
def iter(self, tag=None, *tags):
|
||||
iterator = super(RestrictedElement, self).iter(tag=tag, *tags)
|
||||
return self._filter(iterator)
|
||||
|
||||
def iterdescendants(self, tag=None, *tags):
|
||||
iterator = super(RestrictedElement,
|
||||
self).iterdescendants(tag=tag, *tags)
|
||||
return self._filter(iterator)
|
||||
|
||||
def itersiblings(self, tag=None, preceding=False):
|
||||
iterator = super(RestrictedElement, self).itersiblings(
|
||||
tag=tag, preceding=preceding)
|
||||
return self._filter(iterator)
|
||||
|
||||
def getchildren(self):
|
||||
iterator = super(RestrictedElement, self).__iter__()
|
||||
return list(self._filter(iterator))
|
||||
|
||||
def getiterator(self, tag=None):
|
||||
iterator = super(RestrictedElement, self).getiterator(tag)
|
||||
return self._filter(iterator)
|
||||
|
||||
|
||||
class GlobalParserTLS(threading.local):
|
||||
"""Thread local context for custom parser instances
|
||||
"""
|
||||
parser_config = {
|
||||
'resolve_entities': False,
|
||||
# 'remove_comments': True,
|
||||
# 'remove_pis': True,
|
||||
}
|
||||
|
||||
element_class = RestrictedElement
|
||||
|
||||
def createDefaultParser(self):
|
||||
parser = _etree.XMLParser(**self.parser_config)
|
||||
element_class = self.element_class
|
||||
if self.element_class is not None:
|
||||
lookup = _etree.ElementDefaultClassLookup(element=element_class)
|
||||
parser.set_element_class_lookup(lookup)
|
||||
return parser
|
||||
|
||||
def setDefaultParser(self, parser):
|
||||
self._default_parser = parser
|
||||
|
||||
def getDefaultParser(self):
|
||||
parser = getattr(self, "_default_parser", None)
|
||||
if parser is None:
|
||||
parser = self.createDefaultParser()
|
||||
self.setDefaultParser(parser)
|
||||
return parser
|
||||
|
||||
|
||||
_parser_tls = GlobalParserTLS()
|
||||
getDefaultParser = _parser_tls.getDefaultParser
|
||||
|
||||
|
||||
def check_docinfo(elementtree, forbid_dtd=False, forbid_entities=True):
|
||||
"""Check docinfo of an element tree for DTD and entity declarations
|
||||
|
||||
The check for entity declarations needs lxml 3 or newer. lxml 2.x does
|
||||
not support dtd.iterentities().
|
||||
"""
|
||||
docinfo = elementtree.docinfo
|
||||
if docinfo.doctype:
|
||||
if forbid_dtd:
|
||||
raise DTDForbidden(docinfo.doctype,
|
||||
docinfo.system_url,
|
||||
docinfo.public_id)
|
||||
if forbid_entities and not LXML3:
|
||||
# lxml < 3 has no iterentities()
|
||||
raise NotSupportedError("Unable to check for entity declarations "
|
||||
"in lxml 2.x")
|
||||
|
||||
if forbid_entities:
|
||||
for dtd in docinfo.internalDTD, docinfo.externalDTD:
|
||||
if dtd is None:
|
||||
continue
|
||||
for entity in dtd.iterentities():
|
||||
raise EntitiesForbidden(entity.name, entity.content, None,
|
||||
None, None, None)
|
||||
|
||||
|
||||
def parse(source, parser=None, base_url=None, forbid_dtd=False,
|
||||
forbid_entities=True):
|
||||
if parser is None:
|
||||
parser = getDefaultParser()
|
||||
elementtree = _etree.parse(source, parser, base_url=base_url)
|
||||
check_docinfo(elementtree, forbid_dtd, forbid_entities)
|
||||
return elementtree
|
||||
|
||||
|
||||
def fromstring(text, parser=None, base_url=None, forbid_dtd=False,
|
||||
forbid_entities=True):
|
||||
if parser is None:
|
||||
parser = getDefaultParser()
|
||||
rootelement = _etree.fromstring(text, parser, base_url=base_url)
|
||||
elementtree = rootelement.getroottree()
|
||||
check_docinfo(elementtree, forbid_dtd, forbid_entities)
|
||||
return rootelement
|
||||
|
||||
|
||||
XML = fromstring
|
||||
|
||||
|
||||
def iterparse(*args, **kwargs):
|
||||
raise NotSupportedError("defused lxml.etree.iterparse not available")
|
||||
@@ -1,42 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defused xml.dom.minidom
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
from xml.dom.minidom import _do_pulldom_parse
|
||||
from . import expatbuilder as _expatbuilder
|
||||
from . import pulldom as _pulldom
|
||||
|
||||
__origin__ = "xml.dom.minidom"
|
||||
|
||||
|
||||
def parse(file, parser=None, bufsize=None, forbid_dtd=False,
|
||||
forbid_entities=True, forbid_external=True):
|
||||
"""Parse a file into a DOM by filename or file object."""
|
||||
if parser is None and not bufsize:
|
||||
return _expatbuilder.parse(file, forbid_dtd=forbid_dtd,
|
||||
forbid_entities=forbid_entities,
|
||||
forbid_external=forbid_external)
|
||||
else:
|
||||
return _do_pulldom_parse(_pulldom.parse, (file,),
|
||||
{'parser': parser, 'bufsize': bufsize,
|
||||
'forbid_dtd': forbid_dtd, 'forbid_entities': forbid_entities,
|
||||
'forbid_external': forbid_external})
|
||||
|
||||
|
||||
def parseString(string, parser=None, forbid_dtd=False,
|
||||
forbid_entities=True, forbid_external=True):
|
||||
"""Parse a file into a DOM from a string."""
|
||||
if parser is None:
|
||||
return _expatbuilder.parseString(string, forbid_dtd=forbid_dtd,
|
||||
forbid_entities=forbid_entities,
|
||||
forbid_external=forbid_external)
|
||||
else:
|
||||
return _do_pulldom_parse(_pulldom.parseString, (string,),
|
||||
{'parser': parser, 'forbid_dtd': forbid_dtd,
|
||||
'forbid_entities': forbid_entities,
|
||||
'forbid_external': forbid_external})
|
||||
@@ -1,34 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defused xml.dom.pulldom
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
from xml.dom.pulldom import parse as _parse
|
||||
from xml.dom.pulldom import parseString as _parseString
|
||||
from .sax import make_parser
|
||||
|
||||
__origin__ = "xml.dom.pulldom"
|
||||
|
||||
|
||||
def parse(stream_or_string, parser=None, bufsize=None, forbid_dtd=False,
|
||||
forbid_entities=True, forbid_external=True):
|
||||
if parser is None:
|
||||
parser = make_parser()
|
||||
parser.forbid_dtd = forbid_dtd
|
||||
parser.forbid_entities = forbid_entities
|
||||
parser.forbid_external = forbid_external
|
||||
return _parse(stream_or_string, parser, bufsize)
|
||||
|
||||
|
||||
def parseString(string, parser=None, forbid_dtd=False,
|
||||
forbid_entities=True, forbid_external=True):
|
||||
if parser is None:
|
||||
parser = make_parser()
|
||||
parser.forbid_dtd = forbid_dtd
|
||||
parser.forbid_entities = forbid_entities
|
||||
parser.forbid_external = forbid_external
|
||||
return _parseString(string, parser)
|
||||
@@ -1,49 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defused xml.sax
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
from xml.sax import InputSource as _InputSource
|
||||
from xml.sax import ErrorHandler as _ErrorHandler
|
||||
|
||||
from . import expatreader
|
||||
|
||||
__origin__ = "xml.sax"
|
||||
|
||||
|
||||
def parse(source, handler, errorHandler=_ErrorHandler(), forbid_dtd=False,
|
||||
forbid_entities=True, forbid_external=True):
|
||||
parser = make_parser()
|
||||
parser.setContentHandler(handler)
|
||||
parser.setErrorHandler(errorHandler)
|
||||
parser.forbid_dtd = forbid_dtd
|
||||
parser.forbid_entities = forbid_entities
|
||||
parser.forbid_external = forbid_external
|
||||
parser.parse(source)
|
||||
|
||||
|
||||
def parseString(string, handler, errorHandler=_ErrorHandler(),
|
||||
forbid_dtd=False, forbid_entities=True,
|
||||
forbid_external=True):
|
||||
from io import BytesIO
|
||||
|
||||
if errorHandler is None:
|
||||
errorHandler = _ErrorHandler()
|
||||
parser = make_parser()
|
||||
parser.setContentHandler(handler)
|
||||
parser.setErrorHandler(errorHandler)
|
||||
parser.forbid_dtd = forbid_dtd
|
||||
parser.forbid_entities = forbid_entities
|
||||
parser.forbid_external = forbid_external
|
||||
|
||||
inpsrc = _InputSource()
|
||||
inpsrc.setByteStream(BytesIO(string))
|
||||
parser.parse(inpsrc)
|
||||
|
||||
|
||||
def make_parser(parser_list=[]):
|
||||
return expatreader.create_parser()
|
||||
@@ -1,157 +0,0 @@
|
||||
# defusedxml
|
||||
#
|
||||
# Copyright (c) 2013 by Christian Heimes <christian@python.org>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
# See http://www.python.org/psf/license for licensing details.
|
||||
"""Defused xmlrpclib
|
||||
|
||||
Also defuses gzip bomb
|
||||
"""
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
import io
|
||||
|
||||
from .common import (
|
||||
DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, PY3)
|
||||
|
||||
if PY3:
|
||||
__origin__ = "xmlrpc.client"
|
||||
from xmlrpc.client import ExpatParser
|
||||
from xmlrpc import client as xmlrpc_client
|
||||
from xmlrpc import server as xmlrpc_server
|
||||
from xmlrpc.client import gzip_decode as _orig_gzip_decode
|
||||
from xmlrpc.client import GzipDecodedResponse as _OrigGzipDecodedResponse
|
||||
else:
|
||||
__origin__ = "xmlrpclib"
|
||||
from xmlrpclib import ExpatParser
|
||||
import xmlrpclib as xmlrpc_client
|
||||
xmlrpc_server = None
|
||||
from xmlrpclib import gzip_decode as _orig_gzip_decode
|
||||
from xmlrpclib import GzipDecodedResponse as _OrigGzipDecodedResponse
|
||||
|
||||
try:
|
||||
import gzip
|
||||
except ImportError:
|
||||
gzip = None
|
||||
|
||||
|
||||
# Limit maximum request size to prevent resource exhaustion DoS
|
||||
# Also used to limit maximum amount of gzip decoded data in order to prevent
|
||||
# decompression bombs
|
||||
# A value of -1 or smaller disables the limit
|
||||
MAX_DATA = 30 * 1024 * 1024 # 30 MB
|
||||
|
||||
|
||||
def defused_gzip_decode(data, limit=None):
|
||||
"""gzip encoded data -> unencoded data
|
||||
|
||||
Decode data using the gzip content encoding as described in RFC 1952
|
||||
"""
|
||||
if not gzip:
|
||||
raise NotImplementedError
|
||||
if limit is None:
|
||||
limit = MAX_DATA
|
||||
f = io.BytesIO(data)
|
||||
gzf = gzip.GzipFile(mode="rb", fileobj=f)
|
||||
try:
|
||||
if limit < 0: # no limit
|
||||
decoded = gzf.read()
|
||||
else:
|
||||
decoded = gzf.read(limit + 1)
|
||||
except IOError:
|
||||
raise ValueError("invalid data")
|
||||
f.close()
|
||||
gzf.close()
|
||||
if limit >= 0 and len(decoded) > limit:
|
||||
raise ValueError("max gzipped payload length exceeded")
|
||||
return decoded
|
||||
|
||||
|
||||
class DefusedGzipDecodedResponse(gzip.GzipFile if gzip else object):
|
||||
"""a file-like object to decode a response encoded with the gzip
|
||||
method, as described in RFC 1952.
|
||||
"""
|
||||
|
||||
def __init__(self, response, limit=None):
|
||||
# response doesn't support tell() and read(), required by
|
||||
# GzipFile
|
||||
if not gzip:
|
||||
raise NotImplementedError
|
||||
self.limit = limit = limit if limit is not None else MAX_DATA
|
||||
if limit < 0: # no limit
|
||||
data = response.read()
|
||||
self.readlength = None
|
||||
else:
|
||||
data = response.read(limit + 1)
|
||||
self.readlength = 0
|
||||
if limit >= 0 and len(data) > limit:
|
||||
raise ValueError("max payload length exceeded")
|
||||
self.stringio = io.BytesIO(data)
|
||||
gzip.GzipFile.__init__(self, mode="rb", fileobj=self.stringio)
|
||||
|
||||
def read(self, n):
|
||||
if self.limit >= 0:
|
||||
left = self.limit - self.readlength
|
||||
n = min(n, left + 1)
|
||||
data = gzip.GzipFile.read(self, n)
|
||||
self.readlength += len(data)
|
||||
if self.readlength > self.limit:
|
||||
raise ValueError("max payload length exceeded")
|
||||
return data
|
||||
else:
|
||||
return gzip.GzipFile.read(self, n)
|
||||
|
||||
def close(self):
|
||||
gzip.GzipFile.close(self)
|
||||
self.stringio.close()
|
||||
|
||||
|
||||
class DefusedExpatParser(ExpatParser):
|
||||
|
||||
def __init__(self, target, forbid_dtd=False, forbid_entities=True,
|
||||
forbid_external=True):
|
||||
ExpatParser.__init__(self, target)
|
||||
self.forbid_dtd = forbid_dtd
|
||||
self.forbid_entities = forbid_entities
|
||||
self.forbid_external = forbid_external
|
||||
parser = self._parser
|
||||
if self.forbid_dtd:
|
||||
parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl
|
||||
if self.forbid_entities:
|
||||
parser.EntityDeclHandler = self.defused_entity_decl
|
||||
parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl
|
||||
if self.forbid_external:
|
||||
parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler
|
||||
|
||||
def defused_start_doctype_decl(self, name, sysid, pubid,
|
||||
has_internal_subset):
|
||||
raise DTDForbidden(name, sysid, pubid)
|
||||
|
||||
def defused_entity_decl(self, name, is_parameter_entity, value, base,
|
||||
sysid, pubid, notation_name):
|
||||
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
|
||||
|
||||
def defused_unparsed_entity_decl(self, name, base, sysid, pubid,
|
||||
notation_name):
|
||||
# expat 1.2
|
||||
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
|
||||
|
||||
def defused_external_entity_ref_handler(self, context, base, sysid,
|
||||
pubid):
|
||||
raise ExternalReferenceForbidden(context, base, sysid, pubid)
|
||||
|
||||
|
||||
def monkey_patch():
|
||||
xmlrpc_client.FastParser = DefusedExpatParser
|
||||
xmlrpc_client.GzipDecodedResponse = DefusedGzipDecodedResponse
|
||||
xmlrpc_client.gzip_decode = defused_gzip_decode
|
||||
if xmlrpc_server:
|
||||
xmlrpc_server.gzip_decode = defused_gzip_decode
|
||||
|
||||
|
||||
def unmonkey_patch():
|
||||
xmlrpc_client.FastParser = None
|
||||
xmlrpc_client.GzipDecodedResponse = _OrigGzipDecodedResponse
|
||||
xmlrpc_client.gzip_decode = _orig_gzip_decode
|
||||
if xmlrpc_server:
|
||||
xmlrpc_server.gzip_decode = _orig_gzip_decode
|
||||
@@ -1,49 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask
|
||||
~~~~~
|
||||
|
||||
A microframework based on Werkzeug. It's extensively documented
|
||||
and follows best practice patterns.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
__version__ = '1.0.3'
|
||||
|
||||
# utilities we import from Werkzeug and Jinja2 that are unused
|
||||
# in the module but are exported as public interface.
|
||||
from werkzeug.exceptions import abort
|
||||
from werkzeug.utils import redirect
|
||||
from jinja2 import Markup, escape
|
||||
|
||||
from .app import Flask, Request, Response
|
||||
from .config import Config
|
||||
from .helpers import url_for, flash, send_file, send_from_directory, \
|
||||
get_flashed_messages, get_template_attribute, make_response, safe_join, \
|
||||
stream_with_context
|
||||
from .globals import current_app, g, request, session, _request_ctx_stack, \
|
||||
_app_ctx_stack
|
||||
from .ctx import has_request_context, has_app_context, \
|
||||
after_this_request, copy_current_request_context
|
||||
from .blueprints import Blueprint
|
||||
from .templating import render_template, render_template_string
|
||||
|
||||
# the signals
|
||||
from .signals import signals_available, template_rendered, request_started, \
|
||||
request_finished, got_request_exception, request_tearing_down, \
|
||||
appcontext_tearing_down, appcontext_pushed, \
|
||||
appcontext_popped, message_flashed, before_render_template
|
||||
|
||||
# We're not exposing the actual json module but a convenient wrapper around
|
||||
# it.
|
||||
from . import json
|
||||
|
||||
# This was the only thing that Flask used to export at one point and it had
|
||||
# a more generic name.
|
||||
jsonify = json.jsonify
|
||||
|
||||
# backwards compat, goes away in 1.0
|
||||
from .sessions import SecureCookieSession as Session
|
||||
json_available = True
|
||||
@@ -1,14 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.__main__
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
Alias for flask.run for the command line.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
if __name__ == '__main__':
|
||||
from .cli import main
|
||||
main(as_module=True)
|
||||
@@ -1,101 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask._compat
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
Some py2/py3 compatibility support based on a stripped down
|
||||
version of six so we don't have to depend on a specific version
|
||||
of it.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
_identity = lambda x: x
|
||||
|
||||
|
||||
if not PY2:
|
||||
text_type = str
|
||||
string_types = (str,)
|
||||
integer_types = (int,)
|
||||
|
||||
iterkeys = lambda d: iter(d.keys())
|
||||
itervalues = lambda d: iter(d.values())
|
||||
iteritems = lambda d: iter(d.items())
|
||||
|
||||
from inspect import getfullargspec as getargspec
|
||||
from io import StringIO
|
||||
import collections.abc as collections_abc
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
|
||||
implements_to_string = _identity
|
||||
|
||||
else:
|
||||
text_type = unicode
|
||||
string_types = (str, unicode)
|
||||
integer_types = (int, long)
|
||||
|
||||
iterkeys = lambda d: d.iterkeys()
|
||||
itervalues = lambda d: d.itervalues()
|
||||
iteritems = lambda d: d.iteritems()
|
||||
|
||||
from inspect import getargspec
|
||||
from cStringIO import StringIO
|
||||
import collections as collections_abc
|
||||
|
||||
exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
|
||||
|
||||
def implements_to_string(cls):
|
||||
cls.__unicode__ = cls.__str__
|
||||
cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
|
||||
return cls
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
# This requires a bit of explanation: the basic idea is to make a
|
||||
# dummy metaclass for one level of class instantiation that replaces
|
||||
# itself with the actual metaclass.
|
||||
class metaclass(type):
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
# Certain versions of pypy have a bug where clearing the exception stack
|
||||
# breaks the __exit__ function in a very peculiar way. The second level of
|
||||
# exception blocks is necessary because pypy seems to forget to check if an
|
||||
# exception happened until the next bytecode instruction?
|
||||
#
|
||||
# Relevant PyPy bugfix commit:
|
||||
# https://bitbucket.org/pypy/pypy/commits/77ecf91c635a287e88e60d8ddb0f4e9df4003301
|
||||
# According to ronan on #pypy IRC, it is released in PyPy2 2.3 and later
|
||||
# versions.
|
||||
#
|
||||
# Ubuntu 14.04 has PyPy 2.2.1, which does exhibit this bug.
|
||||
BROKEN_PYPY_CTXMGR_EXIT = False
|
||||
if hasattr(sys, 'pypy_version_info'):
|
||||
class _Mgr(object):
|
||||
def __enter__(self):
|
||||
return self
|
||||
def __exit__(self, *args):
|
||||
if hasattr(sys, 'exc_clear'):
|
||||
# Python 3 (PyPy3) doesn't have exc_clear
|
||||
sys.exc_clear()
|
||||
try:
|
||||
try:
|
||||
with _Mgr():
|
||||
raise AssertionError()
|
||||
except:
|
||||
raise
|
||||
except TypeError:
|
||||
BROKEN_PYPY_CTXMGR_EXIT = True
|
||||
except AssertionError:
|
||||
pass
|
||||
2334
python/flask/app.py
2334
python/flask/app.py
File diff suppressed because it is too large
Load Diff
@@ -1,447 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.blueprints
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
Blueprints are the recommended way to implement larger or more
|
||||
pluggable applications in Flask 0.7 and later.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
from functools import update_wrapper
|
||||
|
||||
from .helpers import _PackageBoundObject, _endpoint_from_view_func
|
||||
|
||||
|
||||
class BlueprintSetupState(object):
|
||||
"""Temporary holder object for registering a blueprint with the
|
||||
application. An instance of this class is created by the
|
||||
:meth:`~flask.Blueprint.make_setup_state` method and later passed
|
||||
to all register callback functions.
|
||||
"""
|
||||
|
||||
def __init__(self, blueprint, app, options, first_registration):
|
||||
#: a reference to the current application
|
||||
self.app = app
|
||||
|
||||
#: a reference to the blueprint that created this setup state.
|
||||
self.blueprint = blueprint
|
||||
|
||||
#: a dictionary with all options that were passed to the
|
||||
#: :meth:`~flask.Flask.register_blueprint` method.
|
||||
self.options = options
|
||||
|
||||
#: as blueprints can be registered multiple times with the
|
||||
#: application and not everything wants to be registered
|
||||
#: multiple times on it, this attribute can be used to figure
|
||||
#: out if the blueprint was registered in the past already.
|
||||
self.first_registration = first_registration
|
||||
|
||||
subdomain = self.options.get('subdomain')
|
||||
if subdomain is None:
|
||||
subdomain = self.blueprint.subdomain
|
||||
|
||||
#: The subdomain that the blueprint should be active for, ``None``
|
||||
#: otherwise.
|
||||
self.subdomain = subdomain
|
||||
|
||||
url_prefix = self.options.get('url_prefix')
|
||||
if url_prefix is None:
|
||||
url_prefix = self.blueprint.url_prefix
|
||||
#: The prefix that should be used for all URLs defined on the
|
||||
#: blueprint.
|
||||
self.url_prefix = url_prefix
|
||||
|
||||
#: A dictionary with URL defaults that is added to each and every
|
||||
#: URL that was defined with the blueprint.
|
||||
self.url_defaults = dict(self.blueprint.url_values_defaults)
|
||||
self.url_defaults.update(self.options.get('url_defaults', ()))
|
||||
|
||||
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
|
||||
"""A helper method to register a rule (and optionally a view function)
|
||||
to the application. The endpoint is automatically prefixed with the
|
||||
blueprint's name.
|
||||
"""
|
||||
if self.url_prefix is not None:
|
||||
if rule:
|
||||
rule = '/'.join((
|
||||
self.url_prefix.rstrip('/'), rule.lstrip('/')))
|
||||
else:
|
||||
rule = self.url_prefix
|
||||
options.setdefault('subdomain', self.subdomain)
|
||||
if endpoint is None:
|
||||
endpoint = _endpoint_from_view_func(view_func)
|
||||
defaults = self.url_defaults
|
||||
if 'defaults' in options:
|
||||
defaults = dict(defaults, **options.pop('defaults'))
|
||||
self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint),
|
||||
view_func, defaults=defaults, **options)
|
||||
|
||||
|
||||
class Blueprint(_PackageBoundObject):
|
||||
"""Represents a blueprint. A blueprint is an object that records
|
||||
functions that will be called with the
|
||||
:class:`~flask.blueprints.BlueprintSetupState` later to register functions
|
||||
or other things on the main application. See :ref:`blueprints` for more
|
||||
information.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
|
||||
warn_on_modifications = False
|
||||
_got_registered_once = False
|
||||
|
||||
#: Blueprint local JSON decoder class to use.
|
||||
#: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_encoder`.
|
||||
json_encoder = None
|
||||
#: Blueprint local JSON decoder class to use.
|
||||
#: Set to ``None`` to use the app's :class:`~flask.app.Flask.json_decoder`.
|
||||
json_decoder = None
|
||||
|
||||
# TODO remove the next three attrs when Sphinx :inherited-members: works
|
||||
# https://github.com/sphinx-doc/sphinx/issues/741
|
||||
|
||||
#: The name of the package or module that this app belongs to. Do not
|
||||
#: change this once it is set by the constructor.
|
||||
import_name = None
|
||||
|
||||
#: Location of the template files to be added to the template lookup.
|
||||
#: ``None`` if templates should not be added.
|
||||
template_folder = None
|
||||
|
||||
#: Absolute path to the package on the filesystem. Used to look up
|
||||
#: resources contained in the package.
|
||||
root_path = None
|
||||
|
||||
def __init__(self, name, import_name, static_folder=None,
|
||||
static_url_path=None, template_folder=None,
|
||||
url_prefix=None, subdomain=None, url_defaults=None,
|
||||
root_path=None):
|
||||
_PackageBoundObject.__init__(self, import_name, template_folder,
|
||||
root_path=root_path)
|
||||
self.name = name
|
||||
self.url_prefix = url_prefix
|
||||
self.subdomain = subdomain
|
||||
self.static_folder = static_folder
|
||||
self.static_url_path = static_url_path
|
||||
self.deferred_functions = []
|
||||
if url_defaults is None:
|
||||
url_defaults = {}
|
||||
self.url_values_defaults = url_defaults
|
||||
|
||||
def record(self, func):
|
||||
"""Registers a function that is called when the blueprint is
|
||||
registered on the application. This function is called with the
|
||||
state as argument as returned by the :meth:`make_setup_state`
|
||||
method.
|
||||
"""
|
||||
if self._got_registered_once and self.warn_on_modifications:
|
||||
from warnings import warn
|
||||
warn(Warning('The blueprint was already registered once '
|
||||
'but is getting modified now. These changes '
|
||||
'will not show up.'))
|
||||
self.deferred_functions.append(func)
|
||||
|
||||
def record_once(self, func):
|
||||
"""Works like :meth:`record` but wraps the function in another
|
||||
function that will ensure the function is only called once. If the
|
||||
blueprint is registered a second time on the application, the
|
||||
function passed is not called.
|
||||
"""
|
||||
def wrapper(state):
|
||||
if state.first_registration:
|
||||
func(state)
|
||||
return self.record(update_wrapper(wrapper, func))
|
||||
|
||||
def make_setup_state(self, app, options, first_registration=False):
|
||||
"""Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState`
|
||||
object that is later passed to the register callback functions.
|
||||
Subclasses can override this to return a subclass of the setup state.
|
||||
"""
|
||||
return BlueprintSetupState(self, app, options, first_registration)
|
||||
|
||||
def register(self, app, options, first_registration=False):
|
||||
"""Called by :meth:`Flask.register_blueprint` to register all views
|
||||
and callbacks registered on the blueprint with the application. Creates
|
||||
a :class:`.BlueprintSetupState` and calls each :meth:`record` callback
|
||||
with it.
|
||||
|
||||
:param app: The application this blueprint is being registered with.
|
||||
:param options: Keyword arguments forwarded from
|
||||
:meth:`~Flask.register_blueprint`.
|
||||
:param first_registration: Whether this is the first time this
|
||||
blueprint has been registered on the application.
|
||||
"""
|
||||
self._got_registered_once = True
|
||||
state = self.make_setup_state(app, options, first_registration)
|
||||
|
||||
if self.has_static_folder:
|
||||
state.add_url_rule(
|
||||
self.static_url_path + '/<path:filename>',
|
||||
view_func=self.send_static_file, endpoint='static'
|
||||
)
|
||||
|
||||
for deferred in self.deferred_functions:
|
||||
deferred(state)
|
||||
|
||||
def route(self, rule, **options):
|
||||
"""Like :meth:`Flask.route` but for a blueprint. The endpoint for the
|
||||
:func:`url_for` function is prefixed with the name of the blueprint.
|
||||
"""
|
||||
def decorator(f):
|
||||
endpoint = options.pop("endpoint", f.__name__)
|
||||
self.add_url_rule(rule, endpoint, f, **options)
|
||||
return f
|
||||
return decorator
|
||||
|
||||
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
|
||||
"""Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for
|
||||
the :func:`url_for` function is prefixed with the name of the blueprint.
|
||||
"""
|
||||
if endpoint:
|
||||
assert '.' not in endpoint, "Blueprint endpoints should not contain dots"
|
||||
if view_func and hasattr(view_func, '__name__'):
|
||||
assert '.' not in view_func.__name__, "Blueprint view function name should not contain dots"
|
||||
self.record(lambda s:
|
||||
s.add_url_rule(rule, endpoint, view_func, **options))
|
||||
|
||||
def endpoint(self, endpoint):
|
||||
"""Like :meth:`Flask.endpoint` but for a blueprint. This does not
|
||||
prefix the endpoint with the blueprint name, this has to be done
|
||||
explicitly by the user of this method. If the endpoint is prefixed
|
||||
with a `.` it will be registered to the current blueprint, otherwise
|
||||
it's an application independent endpoint.
|
||||
"""
|
||||
def decorator(f):
|
||||
def register_endpoint(state):
|
||||
state.app.view_functions[endpoint] = f
|
||||
self.record_once(register_endpoint)
|
||||
return f
|
||||
return decorator
|
||||
|
||||
def app_template_filter(self, name=None):
|
||||
"""Register a custom template filter, available application wide. Like
|
||||
:meth:`Flask.template_filter` but for a blueprint.
|
||||
|
||||
:param name: the optional name of the filter, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
def decorator(f):
|
||||
self.add_app_template_filter(f, name=name)
|
||||
return f
|
||||
return decorator
|
||||
|
||||
def add_app_template_filter(self, f, name=None):
|
||||
"""Register a custom template filter, available application wide. Like
|
||||
:meth:`Flask.add_template_filter` but for a blueprint. Works exactly
|
||||
like the :meth:`app_template_filter` decorator.
|
||||
|
||||
:param name: the optional name of the filter, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
def register_template(state):
|
||||
state.app.jinja_env.filters[name or f.__name__] = f
|
||||
self.record_once(register_template)
|
||||
|
||||
def app_template_test(self, name=None):
|
||||
"""Register a custom template test, available application wide. Like
|
||||
:meth:`Flask.template_test` but for a blueprint.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the test, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
def decorator(f):
|
||||
self.add_app_template_test(f, name=name)
|
||||
return f
|
||||
return decorator
|
||||
|
||||
def add_app_template_test(self, f, name=None):
|
||||
"""Register a custom template test, available application wide. Like
|
||||
:meth:`Flask.add_template_test` but for a blueprint. Works exactly
|
||||
like the :meth:`app_template_test` decorator.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the test, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
def register_template(state):
|
||||
state.app.jinja_env.tests[name or f.__name__] = f
|
||||
self.record_once(register_template)
|
||||
|
||||
def app_template_global(self, name=None):
|
||||
"""Register a custom template global, available application wide. Like
|
||||
:meth:`Flask.template_global` but for a blueprint.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the global, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
def decorator(f):
|
||||
self.add_app_template_global(f, name=name)
|
||||
return f
|
||||
return decorator
|
||||
|
||||
def add_app_template_global(self, f, name=None):
|
||||
"""Register a custom template global, available application wide. Like
|
||||
:meth:`Flask.add_template_global` but for a blueprint. Works exactly
|
||||
like the :meth:`app_template_global` decorator.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
:param name: the optional name of the global, otherwise the
|
||||
function name will be used.
|
||||
"""
|
||||
def register_template(state):
|
||||
state.app.jinja_env.globals[name or f.__name__] = f
|
||||
self.record_once(register_template)
|
||||
|
||||
def before_request(self, f):
|
||||
"""Like :meth:`Flask.before_request` but for a blueprint. This function
|
||||
is only executed before each request that is handled by a function of
|
||||
that blueprint.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.before_request_funcs
|
||||
.setdefault(self.name, []).append(f))
|
||||
return f
|
||||
|
||||
def before_app_request(self, f):
|
||||
"""Like :meth:`Flask.before_request`. Such a function is executed
|
||||
before each request, even if outside of a blueprint.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.before_request_funcs
|
||||
.setdefault(None, []).append(f))
|
||||
return f
|
||||
|
||||
def before_app_first_request(self, f):
|
||||
"""Like :meth:`Flask.before_first_request`. Such a function is
|
||||
executed before the first request to the application.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.before_first_request_funcs.append(f))
|
||||
return f
|
||||
|
||||
def after_request(self, f):
|
||||
"""Like :meth:`Flask.after_request` but for a blueprint. This function
|
||||
is only executed after each request that is handled by a function of
|
||||
that blueprint.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.after_request_funcs
|
||||
.setdefault(self.name, []).append(f))
|
||||
return f
|
||||
|
||||
def after_app_request(self, f):
|
||||
"""Like :meth:`Flask.after_request` but for a blueprint. Such a function
|
||||
is executed after each request, even if outside of the blueprint.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.after_request_funcs
|
||||
.setdefault(None, []).append(f))
|
||||
return f
|
||||
|
||||
def teardown_request(self, f):
|
||||
"""Like :meth:`Flask.teardown_request` but for a blueprint. This
|
||||
function is only executed when tearing down requests handled by a
|
||||
function of that blueprint. Teardown request functions are executed
|
||||
when the request context is popped, even when no actual request was
|
||||
performed.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.teardown_request_funcs
|
||||
.setdefault(self.name, []).append(f))
|
||||
return f
|
||||
|
||||
def teardown_app_request(self, f):
|
||||
"""Like :meth:`Flask.teardown_request` but for a blueprint. Such a
|
||||
function is executed when tearing down each request, even if outside of
|
||||
the blueprint.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.teardown_request_funcs
|
||||
.setdefault(None, []).append(f))
|
||||
return f
|
||||
|
||||
def context_processor(self, f):
|
||||
"""Like :meth:`Flask.context_processor` but for a blueprint. This
|
||||
function is only executed for requests handled by a blueprint.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.template_context_processors
|
||||
.setdefault(self.name, []).append(f))
|
||||
return f
|
||||
|
||||
def app_context_processor(self, f):
|
||||
"""Like :meth:`Flask.context_processor` but for a blueprint. Such a
|
||||
function is executed each request, even if outside of the blueprint.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.template_context_processors
|
||||
.setdefault(None, []).append(f))
|
||||
return f
|
||||
|
||||
def app_errorhandler(self, code):
|
||||
"""Like :meth:`Flask.errorhandler` but for a blueprint. This
|
||||
handler is used for all requests, even if outside of the blueprint.
|
||||
"""
|
||||
def decorator(f):
|
||||
self.record_once(lambda s: s.app.errorhandler(code)(f))
|
||||
return f
|
||||
return decorator
|
||||
|
||||
def url_value_preprocessor(self, f):
|
||||
"""Registers a function as URL value preprocessor for this
|
||||
blueprint. It's called before the view functions are called and
|
||||
can modify the url values provided.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.url_value_preprocessors
|
||||
.setdefault(self.name, []).append(f))
|
||||
return f
|
||||
|
||||
def url_defaults(self, f):
|
||||
"""Callback function for URL defaults for this blueprint. It's called
|
||||
with the endpoint and values and should update the values passed
|
||||
in place.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.url_default_functions
|
||||
.setdefault(self.name, []).append(f))
|
||||
return f
|
||||
|
||||
def app_url_value_preprocessor(self, f):
|
||||
"""Same as :meth:`url_value_preprocessor` but application wide.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.url_value_preprocessors
|
||||
.setdefault(None, []).append(f))
|
||||
return f
|
||||
|
||||
def app_url_defaults(self, f):
|
||||
"""Same as :meth:`url_defaults` but application wide.
|
||||
"""
|
||||
self.record_once(lambda s: s.app.url_default_functions
|
||||
.setdefault(None, []).append(f))
|
||||
return f
|
||||
|
||||
def errorhandler(self, code_or_exception):
|
||||
"""Registers an error handler that becomes active for this blueprint
|
||||
only. Please be aware that routing does not happen local to a
|
||||
blueprint so an error handler for 404 usually is not handled by
|
||||
a blueprint unless it is caused inside a view function. Another
|
||||
special case is the 500 internal server error which is always looked
|
||||
up from the application.
|
||||
|
||||
Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator
|
||||
of the :class:`~flask.Flask` object.
|
||||
"""
|
||||
def decorator(f):
|
||||
self.record_once(lambda s: s.app._register_error_handler(
|
||||
self.name, code_or_exception, f))
|
||||
return f
|
||||
return decorator
|
||||
|
||||
def register_error_handler(self, code_or_exception, f):
|
||||
"""Non-decorator version of the :meth:`errorhandler` error attach
|
||||
function, akin to the :meth:`~flask.Flask.register_error_handler`
|
||||
application-wide function of the :class:`~flask.Flask` object but
|
||||
for error handlers limited to this blueprint.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
self.record_once(lambda s: s.app._register_error_handler(
|
||||
self.name, code_or_exception, f))
|
||||
@@ -1,910 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.cli
|
||||
~~~~~~~~~
|
||||
|
||||
A simple command line application to run flask apps.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import ast
|
||||
import inspect
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import ssl
|
||||
import sys
|
||||
import traceback
|
||||
from functools import update_wrapper
|
||||
from operator import attrgetter
|
||||
from threading import Lock, Thread
|
||||
|
||||
import click
|
||||
from werkzeug.utils import import_string
|
||||
|
||||
from . import __version__
|
||||
from ._compat import getargspec, iteritems, reraise, text_type
|
||||
from .globals import current_app
|
||||
from .helpers import get_debug_flag, get_env, get_load_dotenv
|
||||
|
||||
try:
|
||||
import dotenv
|
||||
except ImportError:
|
||||
dotenv = None
|
||||
|
||||
|
||||
class NoAppException(click.UsageError):
|
||||
"""Raised if an application cannot be found or loaded."""
|
||||
|
||||
|
||||
def find_best_app(script_info, module):
|
||||
"""Given a module instance this tries to find the best possible
|
||||
application in the module or raises an exception.
|
||||
"""
|
||||
from . import Flask
|
||||
|
||||
# Search for the most common names first.
|
||||
for attr_name in ('app', 'application'):
|
||||
app = getattr(module, attr_name, None)
|
||||
|
||||
if isinstance(app, Flask):
|
||||
return app
|
||||
|
||||
# Otherwise find the only object that is a Flask instance.
|
||||
matches = [
|
||||
v for k, v in iteritems(module.__dict__) if isinstance(v, Flask)
|
||||
]
|
||||
|
||||
if len(matches) == 1:
|
||||
return matches[0]
|
||||
elif len(matches) > 1:
|
||||
raise NoAppException(
|
||||
'Detected multiple Flask applications in module "{module}". Use '
|
||||
'"FLASK_APP={module}:name" to specify the correct '
|
||||
'one.'.format(module=module.__name__)
|
||||
)
|
||||
|
||||
# Search for app factory functions.
|
||||
for attr_name in ('create_app', 'make_app'):
|
||||
app_factory = getattr(module, attr_name, None)
|
||||
|
||||
if inspect.isfunction(app_factory):
|
||||
try:
|
||||
app = call_factory(script_info, app_factory)
|
||||
|
||||
if isinstance(app, Flask):
|
||||
return app
|
||||
except TypeError:
|
||||
if not _called_with_wrong_args(app_factory):
|
||||
raise
|
||||
raise NoAppException(
|
||||
'Detected factory "{factory}" in module "{module}", but '
|
||||
'could not call it without arguments. Use '
|
||||
'"FLASK_APP=\'{module}:{factory}(args)\'" to specify '
|
||||
'arguments.'.format(
|
||||
factory=attr_name, module=module.__name__
|
||||
)
|
||||
)
|
||||
|
||||
raise NoAppException(
|
||||
'Failed to find Flask application or factory in module "{module}". '
|
||||
'Use "FLASK_APP={module}:name to specify one.'.format(
|
||||
module=module.__name__
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def call_factory(script_info, app_factory, arguments=()):
    """Takes an app factory, a ``script_info`` object and optionally a tuple
    of arguments. Checks for the existence of a script_info argument and calls
    the app_factory depending on that and the arguments provided.
    """
    # NOTE(review): ``getargspec`` is presumably a py2/py3 compat shim
    # imported at module level — confirm; ``inspect.getargspec`` is
    # deprecated on Python 3.
    args_spec = getargspec(app_factory)
    arg_names = args_spec.args
    arg_defaults = args_spec.defaults

    # A factory that declares a ``script_info`` parameter receives it by
    # keyword in addition to any positional arguments.
    if 'script_info' in arg_names:
        return app_factory(*arguments, script_info=script_info)
    elif arguments:
        return app_factory(*arguments)
    elif not arguments and len(arg_names) == 1 and arg_defaults is None:
        # Exactly one required parameter and no explicit arguments:
        # assume the legacy factory signature that takes the script info
        # positionally.
        return app_factory(script_info)

    return app_factory()
|
||||
|
||||
|
||||
def _called_with_wrong_args(factory):
|
||||
"""Check whether calling a function raised a ``TypeError`` because
|
||||
the call failed or because something in the factory raised the
|
||||
error.
|
||||
|
||||
:param factory: the factory function that was called
|
||||
:return: true if the call failed
|
||||
"""
|
||||
tb = sys.exc_info()[2]
|
||||
|
||||
try:
|
||||
while tb is not None:
|
||||
if tb.tb_frame.f_code is factory.__code__:
|
||||
# in the factory, it was called successfully
|
||||
return False
|
||||
|
||||
tb = tb.tb_next
|
||||
|
||||
# didn't reach the factory
|
||||
return True
|
||||
finally:
|
||||
del tb
|
||||
|
||||
|
||||
def find_app_by_string(script_info, module, app_name):
    """Checks if the given string is a variable name or a function. If it is a
    function, it checks for specified arguments and whether it takes a
    ``script_info`` argument and calls the function with the appropriate
    arguments.

    :param script_info: the ``ScriptInfo`` instance forwarded to factories.
    :param module: the imported module to look the name up in.
    :param app_name: a bare variable name or a call expression such as
        ``create_app('dev')``.
    :raises NoAppException: if the expression is invalid, the name is
        missing, the arguments cannot be parsed, or the result is not a
        Flask instance.
    """
    from flask import Flask
    # Accept either a bare name or ``name(arg, ...)``.
    match = re.match(r'^ *([^ ()]+) *(?:\((.*?) *,? *\))? *$', app_name)

    if not match:
        raise NoAppException(
            '"{name}" is not a valid variable name or function '
            'expression.'.format(name=app_name)
        )

    name, args = match.groups()

    try:
        attr = getattr(module, name)
    except AttributeError as e:
        raise NoAppException(e.args[0])

    if inspect.isfunction(attr):
        if args:
            try:
                # Parse the argument list as a literal Python tuple.
                args = ast.literal_eval('({args},)'.format(args=args))
            # BUG FIX: was ``)as e:`` (missing space), and the bound name
            # was never used by the format string below.
            except (ValueError, SyntaxError):
                raise NoAppException(
                    'Could not parse the arguments in '
                    '"{app_name}".'.format(app_name=app_name)
                )
        else:
            args = ()

        try:
            app = call_factory(script_info, attr, args)
        except TypeError as e:
            # Re-raise if the error came from inside the factory rather
            # than from a bad call signature.
            if not _called_with_wrong_args(attr):
                raise

            raise NoAppException(
                '{e}\nThe factory "{app_name}" in module "{module}" could not '
                'be called with the specified arguments.'.format(
                    e=e, app_name=app_name, module=module.__name__
                )
            )
    else:
        app = attr

    if isinstance(app, Flask):
        return app

    raise NoAppException(
        'A valid Flask application was not obtained from '
        '"{module}:{app_name}".'.format(
            module=module.__name__, app_name=app_name
        )
    )
|
||||
|
||||
|
||||
def prepare_import(path):
    """Given a filename this will try to calculate the python path, add it
    to the search path and return the actual module name that is expected.
    """
    path = os.path.realpath(path)

    # Drop a trailing ``.py`` extension, if any.
    base, ext = os.path.splitext(path)
    if ext == '.py':
        path = base

    # A package's ``__init__`` module is imported by its directory name.
    if os.path.basename(path) == '__init__':
        path = os.path.dirname(path)

    parts = []

    # Walk upward collecting package names until we leave the package
    # structure (the first directory without an ``__init__.py``).
    while True:
        path, name = os.path.split(path)
        parts.append(name)

        if not os.path.exists(os.path.join(path, '__init__.py')):
            break

    # Make the containing directory importable.
    if sys.path[0] != path:
        sys.path.insert(0, path)

    return '.'.join(reversed(parts))
|
||||
|
||||
|
||||
def locate_app(script_info, module_name, app_name, raise_if_not_found=True):
    """Import ``module_name`` and locate a Flask application inside it.

    :param script_info: the ``ScriptInfo`` instance forwarded to factories.
    :param module_name: dotted import name of the module to load.
    :param app_name: optional attribute name or factory expression inside
        the module; when ``None`` the best app is auto-detected.
    :param raise_if_not_found: when ``False``, return ``None`` instead of
        raising if the module itself cannot be imported.
    :raises NoAppException: if the import fails or no app can be found.
    """
    __traceback_hide__ = True  # hide this frame in Werkzeug tracebacks

    try:
        __import__(module_name)
    except ImportError:
        # Reraise the ImportError if it occurred within the imported module.
        # Determine this by checking whether the trace has a depth > 1.
        if sys.exc_info()[-1].tb_next:
            raise NoAppException(
                'While importing "{name}", an ImportError was raised:'
                '\n\n{tb}'.format(name=module_name, tb=traceback.format_exc())
            )
        elif raise_if_not_found:
            raise NoAppException(
                'Could not import "{name}".'.format(name=module_name)
            )
        else:
            return

    module = sys.modules[module_name]

    if app_name is None:
        return find_best_app(script_info, module)
    else:
        return find_app_by_string(script_info, module, app_name)
|
||||
|
||||
|
||||
def get_version(ctx, param, value):
    """Click callback for ``--version``: print version info and exit.

    Does nothing when the flag was not passed or when click is only
    parsing for shell completion (``resilient_parsing``).
    """
    if not value or ctx.resilient_parsing:
        return
    # Imported lazily so ``--version`` works even before werkzeug is
    # needed elsewhere.
    import werkzeug
    message = (
        'Python %(python)s\n'
        'Flask %(flask)s\n'
        'Werkzeug %(werkzeug)s'
    )
    click.echo(message % {
        'python': platform.python_version(),
        'flask': __version__,
        'werkzeug': werkzeug.__version__,
    }, color=ctx.color)
    ctx.exit()
|
||||
|
||||
|
||||
# Reusable eager ``--version`` flag. ``is_eager`` makes it run before any
# command is resolved and ``expose_value=False`` keeps it out of the
# command callback's parameters.
version_option = click.Option(
    ['--version'],
    help='Show the flask version',
    expose_value=False,
    callback=get_version,
    is_flag=True,
    is_eager=True
)
|
||||
|
||||
|
||||
class DispatchingApp(object):
    """Special application that dispatches to a Flask application which
    is imported by name in a background thread. If an error happens
    it is recorded and shown as part of the WSGI handling which in case
    of the Werkzeug debugger means that it shows up in the browser.
    """

    def __init__(self, loader, use_eager_loading=False):
        # ``loader`` is a zero-argument callable that returns the WSGI app.
        self.loader = loader
        self._app = None
        self._lock = Lock()
        # Holds ``sys.exc_info()`` from a failed background load until it
        # can be re-raised inside a request.
        self._bg_loading_exc_info = None
        if use_eager_loading:
            self._load_unlocked()
        else:
            self._load_in_background()

    def _load_in_background(self):
        # Import the app in a worker thread so the server can start
        # accepting connections immediately.
        def _load_app():
            __traceback_hide__ = True
            with self._lock:
                try:
                    self._load_unlocked()
                except Exception:
                    # Remember the failure; it is surfaced on the first
                    # request via _flush_bg_loading_exception().
                    self._bg_loading_exc_info = sys.exc_info()
        t = Thread(target=_load_app, args=())
        t.start()

    def _flush_bg_loading_exception(self):
        # Re-raise (once) an exception captured by the background loader.
        __traceback_hide__ = True
        exc_info = self._bg_loading_exc_info
        if exc_info is not None:
            self._bg_loading_exc_info = None
            reraise(*exc_info)

    def _load_unlocked(self):
        # Caller must hold ``self._lock`` unless no other thread can be
        # loading concurrently (eager loading in __init__).
        __traceback_hide__ = True
        self._app = rv = self.loader()
        self._bg_loading_exc_info = None
        return rv

    def __call__(self, environ, start_response):
        # WSGI entry point: dispatch to the loaded app, loading it on
        # first use if the background load has not finished yet.
        __traceback_hide__ = True
        if self._app is not None:
            return self._app(environ, start_response)
        self._flush_bg_loading_exception()
        with self._lock:
            if self._app is not None:
                # The background thread finished while we waited on the lock.
                rv = self._app
            else:
                rv = self._load_unlocked()
        return rv(environ, start_response)
|
||||
|
||||
|
||||
class ScriptInfo(object):
    """Helper object to deal with Flask applications. This is usually not
    necessary to interface with as it's used internally in the dispatching
    to click. In future versions of Flask this object will most likely play
    a bigger role. Typically it's created automatically by the
    :class:`FlaskGroup` but you can also manually create it and pass it
    onwards as click object.
    """

    def __init__(self, app_import_path=None, create_app=None,
                 set_debug_flag=True):
        #: Optionally the import path for the Flask application.
        self.app_import_path = app_import_path or os.environ.get('FLASK_APP')
        #: Optionally a function that is passed the script info to create
        #: the instance of the application.
        self.create_app = create_app
        #: A dictionary with arbitrary data that can be associated with
        #: this script info.
        self.data = {}
        # Whether load_app() should sync app.debug with the environment.
        self.set_debug_flag = set_debug_flag
        # Cache so the app is only located/created once per invocation.
        self._loaded_app = None

    def load_app(self):
        """Loads the Flask app (if not yet loaded) and returns it. Calling
        this multiple times will just result in the already loaded app to
        be returned.

        :raises NoAppException: if no application could be located.
        """
        __traceback_hide__ = True  # hide this frame in Werkzeug tracebacks

        if self._loaded_app is not None:
            return self._loaded_app

        app = None

        if self.create_app is not None:
            # An explicit factory always wins over import-path discovery.
            app = call_factory(self, self.create_app)
        else:
            if self.app_import_path:
                # Split "module:attr" on the first colon that is not part
                # of a Windows drive/path (hence the (?![\\/]) lookahead).
                path, name = (re.split(r':(?![\\/])', self.app_import_path, 1) + [None])[:2]
                import_name = prepare_import(path)
                app = locate_app(self, import_name, name)
            else:
                # No FLASK_APP set: probe the conventional module names.
                for path in ('wsgi.py', 'app.py'):
                    import_name = prepare_import(path)
                    app = locate_app(self, import_name, None,
                                     raise_if_not_found=False)

                    if app:
                        break

        if not app:
            raise NoAppException(
                'Could not locate a Flask application. You did not provide '
                'the "FLASK_APP" environment variable, and a "wsgi.py" or '
                '"app.py" module was not found in the current directory.'
            )

        if self.set_debug_flag:
            # Update the app's debug flag through the descriptor so that
            # other values repopulate as well.
            app.debug = get_debug_flag()

        self._loaded_app = app
        return app
|
||||
|
||||
|
||||
pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True)
|
||||
|
||||
|
||||
def with_appcontext(f):
    """Wraps a callback so that it's guaranteed to be executed with the
    script's application context. If callbacks are registered directly
    to the ``app.cli`` object then they are wrapped with this function
    by default unless it's disabled.

    :param f: the click command callback to wrap.
    :return: the wrapped callback with ``f``'s metadata preserved.
    """
    @click.pass_context
    def decorator(__ctx, *args, **kwargs):
        # Load the app (if needed) and run the callback inside its context.
        with __ctx.ensure_object(ScriptInfo).load_app().app_context():
            return __ctx.invoke(f, *args, **kwargs)
    return update_wrapper(decorator, f)
|
||||
|
||||
|
||||
class AppGroup(click.Group):
    """This works similar to a regular click :class:`~click.Group` but it
    changes the behavior of the :meth:`command` decorator so that it
    automatically wraps the functions in :func:`with_appcontext`.

    Not to be confused with :class:`FlaskGroup`.
    """

    def command(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it wraps callbacks in :func:`with_appcontext`
        unless it's disabled by passing ``with_appcontext=False``.
        """
        # Pop our custom flag before click sees the kwargs.
        wrap_for_ctx = kwargs.pop('with_appcontext', True)
        def decorator(f):
            if wrap_for_ctx:
                f = with_appcontext(f)
            return click.Group.command(self, *args, **kwargs)(f)
        return decorator

    def group(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it defaults the group class to
        :class:`AppGroup`.
        """
        # Nested groups inherit the app-context wrapping behavior.
        kwargs.setdefault('cls', AppGroup)
        return click.Group.group(self, *args, **kwargs)
|
||||
|
||||
|
||||
class FlaskGroup(AppGroup):
    """Special subclass of the :class:`AppGroup` group that supports
    loading more commands from the configured Flask app. Normally a
    developer does not have to interface with this class but there are
    some very advanced use cases for which it makes sense to create an
    instance of this.

    For information as of why this is useful see :ref:`custom-scripts`.

    :param add_default_commands: if this is True then the default run and
        shell commands wil be added.
    :param add_version_option: adds the ``--version`` option.
    :param create_app: an optional callback that is passed the script info and
        returns the loaded app.
    :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv`
        files to set environment variables. Will also change the working
        directory to the directory containing the first file found.
    :param set_debug_flag: Set the app's debug flag based on the active
        environment

    .. versionchanged:: 1.0
        If installed, python-dotenv will be used to load environment variables
        from :file:`.env` and :file:`.flaskenv` files.
    """

    def __init__(self, add_default_commands=True, create_app=None,
                 add_version_option=True, load_dotenv=True,
                 set_debug_flag=True, **extra):
        params = list(extra.pop('params', None) or ())

        if add_version_option:
            params.append(version_option)

        AppGroup.__init__(self, params=params, **extra)
        self.create_app = create_app
        self.load_dotenv = load_dotenv
        self.set_debug_flag = set_debug_flag

        if add_default_commands:
            self.add_command(run_command)
            self.add_command(shell_command)
            self.add_command(routes_command)

        # Entry-point commands are discovered lazily on first lookup.
        self._loaded_plugin_commands = False

    def _load_plugin_commands(self):
        # Register commands advertised by installed packages via the
        # ``flask.commands`` setuptools entry point (once).
        if self._loaded_plugin_commands:
            return
        try:
            import pkg_resources
        except ImportError:
            self._loaded_plugin_commands = True
            return

        for ep in pkg_resources.iter_entry_points('flask.commands'):
            self.add_command(ep.load(), ep.name)
        self._loaded_plugin_commands = True

    def get_command(self, ctx, name):
        self._load_plugin_commands()

        # We load built-in commands first as these should always be the
        # same no matter what the app does. If the app does want to
        # override this it needs to make a custom instance of this group
        # and not attach the default commands.
        #
        # This also means that the script stays functional in case the
        # application completely fails.
        rv = AppGroup.get_command(self, ctx, name)
        if rv is not None:
            return rv

        info = ctx.ensure_object(ScriptInfo)
        try:
            rv = info.load_app().cli.get_command(ctx, name)
            if rv is not None:
                return rv
        except NoAppException:
            # App could not be loaded: fall through and return None so
            # click reports an unknown command.
            pass

    def list_commands(self, ctx):
        self._load_plugin_commands()

        # The commands available is the list of both the application (if
        # available) plus the builtin commands.
        rv = set(click.Group.list_commands(self, ctx))
        info = ctx.ensure_object(ScriptInfo)
        try:
            rv.update(info.load_app().cli.list_commands(ctx))
        except Exception:
            # Here we intentionally swallow all exceptions as we don't
            # want the help page to break if the app does not exist.
            # If someone attempts to use the command we try to create
            # the app again and this will give us the error.
            # However, we will not do so silently because that would confuse
            # users.
            traceback.print_exc()
        return sorted(rv)

    def main(self, *args, **kwargs):
        # Set a global flag that indicates that we were invoked from the
        # command line interface. This is detected by Flask.run to make the
        # call into a no-op. This is necessary to avoid ugly errors when the
        # script that is loaded here also attempts to start a server.
        os.environ['FLASK_RUN_FROM_CLI'] = 'true'

        if get_load_dotenv(self.load_dotenv):
            load_dotenv()

        obj = kwargs.get('obj')

        if obj is None:
            obj = ScriptInfo(create_app=self.create_app,
                             set_debug_flag=self.set_debug_flag)

        kwargs['obj'] = obj
        # Allow options to be set via FLASK_* environment variables.
        kwargs.setdefault('auto_envvar_prefix', 'FLASK')
        return super(FlaskGroup, self).main(*args, **kwargs)
|
||||
|
||||
|
||||
def _path_is_ancestor(path, other):
|
||||
"""Take ``other`` and remove the length of ``path`` from it. Then join it
|
||||
to ``path``. If it is the original value, ``path`` is an ancestor of
|
||||
``other``."""
|
||||
return os.path.join(path, other[len(path):].lstrip(os.sep)) == other
|
||||
|
||||
|
||||
def load_dotenv(path=None):
    """Load "dotenv" files in order of precedence to set environment variables.

    If an env var is already set it is not overwritten, so earlier files in the
    list are preferred over later files.

    Changes the current working directory to the location of the first file
    found, with the assumption that it is in the top level project directory
    and will be where the Python path should import local packages from.

    This is a no-op if `python-dotenv`_ is not installed.

    .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme

    :param path: Load the file at this location instead of searching.
    :return: ``True`` if a file was loaded.

    .. versionadded:: 1.0
    """
    # ``dotenv`` is the optional module imported at file level (None when
    # python-dotenv is not installed).
    if dotenv is None:
        # Only nag the user when dotenv files actually exist.
        if path or os.path.exists('.env') or os.path.exists('.flaskenv'):
            click.secho(
                ' * Tip: There are .env files present.'
                ' Do "pip install python-dotenv" to use them.',
                fg='yellow')
        return

    if path is not None:
        return dotenv.load_dotenv(path)

    new_dir = None

    # ``.env`` first so its values take precedence over ``.flaskenv``.
    for name in ('.env', '.flaskenv'):
        path = dotenv.find_dotenv(name, usecwd=True)

        if not path:
            continue

        # Remember the directory of the *first* file found only.
        if new_dir is None:
            new_dir = os.path.dirname(path)

        dotenv.load_dotenv(path)

    if new_dir and os.getcwd() != new_dir:
        os.chdir(new_dir)

    return new_dir is not None  # at least one file was located and loaded
|
||||
|
||||
|
||||
def show_server_banner(env, debug, app_import_path, eager_loading):
    """Show extra startup messages the first time the server is run,
    ignoring the reloader.
    """
    # The reloader's child process sets WERKZEUG_RUN_MAIN; stay quiet there.
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
        return

    if app_import_path is not None:
        suffix = '' if eager_loading else ' (lazy loading)'
        click.echo(
            ' * Serving Flask app "{0}"{1}'.format(app_import_path, suffix)
        )

    click.echo(' * Environment: {0}'.format(env))

    if env == 'production':
        # Warn loudly when the dev server is run with a production env.
        click.secho(
            ' WARNING: This is a development server. '
            'Do not use it in a production deployment.', fg='red')
        click.secho(' Use a production WSGI server instead.', dim=True)

    if debug is not None:
        click.echo(' * Debug mode: {0}'.format('on' if debug else 'off'))
|
||||
|
||||
|
||||
class CertParamType(click.ParamType):
    """Click option type for the ``--cert`` option. Allows either an
    existing file, the string ``'adhoc'``, or an import for a
    :class:`~ssl.SSLContext` object.
    """

    name = 'path'

    def __init__(self):
        # Reused for the "existing file" branch of convert().
        self.path_type = click.Path(
            exists=True, dir_okay=False, resolve_path=True)

    def convert(self, value, param, ctx):
        """Convert ``value`` to a cert path, ``'adhoc'``, or an imported
        SSLContext; re-raise the original BadParameter otherwise."""
        try:
            # First try to interpret the value as an existing file path.
            return self.path_type(value, param, ctx)
        except click.BadParameter:
            value = click.STRING(value, param, ctx).lower()

            if value == 'adhoc':
                # Ad-hoc certs are generated on the fly and need pyOpenSSL.
                try:
                    import OpenSSL
                except ImportError:
                    raise click.BadParameter(
                        'Using ad-hoc certificates requires pyOpenSSL.',
                        ctx, param)

                return value

            # Otherwise treat the value as an import path to an object.
            obj = import_string(value, silent=True)

            if sys.version_info < (2, 7, 9):
                # ssl.SSLContext is unavailable before 2.7.9; accept any
                # imported object in that case.
                if obj:
                    return obj
            else:
                if isinstance(obj, ssl.SSLContext):
                    return obj

            # Nothing matched: re-raise the original BadParameter.
            raise
|
||||
|
||||
|
||||
def _validate_key(ctx, param, value):
    """The ``--key`` option must be specified when ``--cert`` is a file.
    Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed.

    :param ctx: the click context; ``cert`` is read from ``ctx.params``.
    :param param: the ``--key`` parameter being validated.
    :param value: the path given for ``--key``, or ``None``.
    :raises click.BadParameter: for invalid cert/key combinations.
    """
    cert = ctx.params.get('cert')
    is_adhoc = cert == 'adhoc'

    # ssl.SSLContext is unavailable before Python 2.7.9, so fall back to
    # a duck-typed check there.
    if sys.version_info < (2, 7, 9):
        is_context = cert and not isinstance(cert, (text_type, bytes))
    else:
        is_context = isinstance(cert, ssl.SSLContext)

    if value is not None:
        if is_adhoc:
            raise click.BadParameter(
                'When "--cert" is "adhoc", "--key" is not used.',
                ctx, param)

        if is_context:
            # BUG FIX: the message was missing the closing quote around
            # "--key" ('"--key is not used.').
            raise click.BadParameter(
                'When "--cert" is an SSLContext object, "--key" is not used.',
                ctx, param)

        if not cert:
            raise click.BadParameter(
                '"--cert" must also be specified.',
                ctx, param)

        # A (cert, key) tuple is what Werkzeug's ssl_context expects.
        ctx.params['cert'] = cert, value

    else:
        if cert and not (is_adhoc or is_context):
            raise click.BadParameter(
                'Required when using "--cert".',
                ctx, param)

    return value
|
||||
|
||||
|
||||
@click.command('run', short_help='Run a development server.')
@click.option('--host', '-h', default='127.0.0.1',
              help='The interface to bind to.')
@click.option('--port', '-p', default=5000,
              help='The port to bind to.')
@click.option('--cert', type=CertParamType(),
              help='Specify a certificate file to use HTTPS.')
@click.option('--key',
              type=click.Path(exists=True, dir_okay=False, resolve_path=True),
              callback=_validate_key, expose_value=False,
              help='The key file to use when specifying a certificate.')
@click.option('--reload/--no-reload', default=None,
              help='Enable or disable the reloader. By default the reloader '
              'is active if debug is enabled.')
@click.option('--debugger/--no-debugger', default=None,
              help='Enable or disable the debugger. By default the debugger '
              'is active if debug is enabled.')
@click.option('--eager-loading/--lazy-loader', default=None,
              help='Enable or disable eager loading. By default eager '
              'loading is enabled if the reloader is disabled.')
@click.option('--with-threads/--without-threads', default=True,
              help='Enable or disable multithreading.')
@pass_script_info
def run_command(info, host, port, reload, debugger, eager_loading,
                with_threads, cert):
    """Run a local development server.

    This server is for development purposes only. It does not provide
    the stability, security, or performance of production WSGI servers.

    The reloader and debugger are enabled by default if
    FLASK_ENV=development or FLASK_DEBUG=1.
    """
    debug = get_debug_flag()

    # Unset options default to the debug flag.
    if reload is None:
        reload = debug

    if debugger is None:
        debugger = debug

    # Lazy loading lets the reloader start before the app imports.
    if eager_loading is None:
        eager_loading = not reload

    show_server_banner(get_env(), debug, info.app_import_path, eager_loading)
    app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)

    from werkzeug.serving import run_simple
    run_simple(host, port, app, use_reloader=reload, use_debugger=debugger,
               threaded=with_threads, ssl_context=cert)
|
||||
|
||||
|
||||
@click.command('shell', short_help='Run a shell in the app context.')
@with_appcontext
def shell_command():
    """Run an interactive Python shell in the context of a given
    Flask application. The application will populate the default
    namespace of this shell according to it's configuration.

    This is useful for executing small snippets of management code
    without having to manually configure the application.
    """
    import code
    from flask.globals import _app_ctx_stack
    # @with_appcontext guarantees an app context is pushed here.
    app = _app_ctx_stack.top.app
    banner = 'Python %s on %s\nApp: %s [%s]\nInstance: %s' % (
        sys.version,
        sys.platform,
        app.import_name,
        app.env,
        app.instance_path,
    )
    ctx = {}

    # Support the regular Python interpreter startup script if someone
    # is using it.
    startup = os.environ.get('PYTHONSTARTUP')
    if startup and os.path.isfile(startup):
        with open(startup, 'r') as f:
            eval(compile(f.read(), startup, 'exec'), ctx)

    # The app's shell context (app, g, etc.) goes into the namespace.
    ctx.update(app.make_shell_context())

    code.interact(banner=banner, local=ctx)
|
||||
|
||||
|
||||
@click.command('routes', short_help='Show the routes for the app.')
@click.option(
    '--sort', '-s',
    type=click.Choice(('endpoint', 'methods', 'rule', 'match')),
    default='endpoint',
    help=(
        'Method to sort routes by. "match" is the order that Flask will match '
        'routes when dispatching a request.'
    )
)
@click.option(
    '--all-methods',
    is_flag=True,
    help="Show HEAD and OPTIONS methods."
)
@with_appcontext
def routes_command(sort, all_methods):
    """Show all registered routes with endpoints and methods."""

    rules = list(current_app.url_map.iter_rules())
    if not rules:
        click.echo('No routes were registered.')
        return

    # HEAD/OPTIONS are implicit on most routes; hide them unless asked.
    ignored_methods = set(() if all_methods else ('HEAD', 'OPTIONS'))

    # 'match' sort means keeping the url_map's own ordering.
    if sort in ('endpoint', 'rule'):
        rules = sorted(rules, key=attrgetter(sort))
    elif sort == 'methods':
        rules = sorted(rules, key=lambda rule: sorted(rule.methods))

    rule_methods = [
        ', '.join(sorted(rule.methods - ignored_methods)) for rule in rules
    ]

    # Column widths: widest of header and content, per column.
    headers = ('Endpoint', 'Methods', 'Rule')
    widths = (
        max(len(rule.endpoint) for rule in rules),
        max(len(methods) for methods in rule_methods),
        max(len(rule.rule) for rule in rules),
    )
    widths = [max(len(h), w) for h, w in zip(headers, widths)]
    row = '{{0:<{0}}}  {{1:<{1}}}  {{2:<{2}}}'.format(*widths)

    click.echo(row.format(*headers).strip())
    click.echo(row.format(*('-' * width for width in widths)))

    for rule, methods in zip(rules, rule_methods):
        click.echo(row.format(rule.endpoint, methods, rule.rule).rstrip())
|
||||
|
||||
|
||||
# The group behind the ``flask`` executable. The help text substitutes
# platform-appropriate examples (``export``/``$`` on POSIX, ``set``/``>``
# elsewhere); ``\b`` tells click not to re-wrap the example block.
cli = FlaskGroup(help="""\
A general utility script for Flask applications.

Provides commands from Flask, extensions, and the application. Loads the
application defined in the FLASK_APP environment variable, or from a wsgi.py
file. Setting the FLASK_ENV environment variable to 'development' will enable
debug mode.

\b
{prefix}{cmd} FLASK_APP=hello.py
{prefix}{cmd} FLASK_ENV=development
{prefix}flask run
""".format(
    cmd='export' if os.name == 'posix' else 'set',
    prefix='$ ' if os.name == 'posix' else '> '
))
|
||||
|
||||
|
||||
def main(as_module=False):
    """CLI entry point.

    :param as_module: ``True`` when invoked via ``python -m flask``, in
        which case ``sys.argv`` is rewritten so the reloader re-executes
        the same ``-m`` command.
    """
    args = sys.argv[1:]

    if as_module:
        this_module = 'flask'

        # Python < 2.7 cannot run a package's __main__ via -m directly.
        if sys.version_info < (2, 7):
            this_module += '.cli'

        name = 'python -m ' + this_module

        # Python rewrites "python -m flask" to the path to the file in argv.
        # Restore the original command so that the reloader works.
        sys.argv = ['-m', this_module] + args
    else:
        name = None

    cli.main(args=args, prog_name=name)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Reached when executed via ``python -m flask``.
    main(as_module=True)
|
||||
@@ -1,269 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.config
|
||||
~~~~~~~~~~~~
|
||||
|
||||
Implements the configuration related objects.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
import os
|
||||
import types
|
||||
import errno
|
||||
|
||||
from werkzeug.utils import import_string
|
||||
from ._compat import string_types, iteritems
|
||||
from . import json
|
||||
|
||||
|
||||
class ConfigAttribute(object):
    """Descriptor that forwards attribute access to ``obj.config``.

    Reads return ``obj.config[name]`` (optionally passed through a
    converter); writes store into ``obj.config[name]``.
    """

    def __init__(self, name, get_converter=None):
        self.__name__ = name
        self.get_converter = get_converter

    def __get__(self, obj, type=None):
        # Accessed on the class itself: hand back the descriptor.
        if obj is None:
            return self
        value = obj.config[self.__name__]
        if self.get_converter is None:
            return value
        return self.get_converter(value)

    def __set__(self, obj, value):
        obj.config[self.__name__] = value
|
||||
|
||||
|
||||
class Config(dict):
|
||||
"""Works exactly like a dict but provides ways to fill it from files
|
||||
or special dictionaries. There are two common patterns to populate the
|
||||
config.
|
||||
|
||||
Either you can fill the config from a config file::
|
||||
|
||||
app.config.from_pyfile('yourconfig.cfg')
|
||||
|
||||
Or alternatively you can define the configuration options in the
|
||||
module that calls :meth:`from_object` or provide an import path to
|
||||
a module that should be loaded. It is also possible to tell it to
|
||||
use the same module and with that provide the configuration values
|
||||
just before the call::
|
||||
|
||||
DEBUG = True
|
||||
SECRET_KEY = 'development key'
|
||||
app.config.from_object(__name__)
|
||||
|
||||
In both cases (loading from any Python file or loading from modules),
|
||||
only uppercase keys are added to the config. This makes it possible to use
|
||||
lowercase values in the config file for temporary values that are not added
|
||||
to the config or to define the config keys in the same file that implements
|
||||
the application.
|
||||
|
||||
Probably the most interesting way to load configurations is from an
|
||||
environment variable pointing to a file::
|
||||
|
||||
app.config.from_envvar('YOURAPPLICATION_SETTINGS')
|
||||
|
||||
In this case before launching the application you have to set this
|
||||
environment variable to the file you want to use. On Linux and OS X
|
||||
use the export statement::
|
||||
|
||||
export YOURAPPLICATION_SETTINGS='/path/to/config/file'
|
||||
|
||||
On windows use `set` instead.
|
||||
|
||||
:param root_path: path to which files are read relative from. When the
|
||||
config object is created by the application, this is
|
||||
the application's :attr:`~flask.Flask.root_path`.
|
||||
:param defaults: an optional dictionary of default values
|
||||
"""
|
||||
|
||||
    def __init__(self, root_path, defaults=None):
        # ``root_path`` anchors relative config-file lookups; ``defaults``
        # seeds the mapping.
        dict.__init__(self, defaults or {})
        self.root_path = root_path
|
||||
|
||||
    def from_envvar(self, variable_name, silent=False):
        """Loads a configuration from an environment variable pointing to
        a configuration file. This is basically just a shortcut with nicer
        error messages for this line of code::

            app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])

        :param variable_name: name of the environment variable
        :param silent: set to ``True`` if you want silent failure for missing
                       files.
        :return: bool. ``True`` if able to load config, ``False`` otherwise.
        :raises RuntimeError: if the variable is unset and ``silent`` is
                              ``False``.
        """
        rv = os.environ.get(variable_name)
        if not rv:
            if silent:
                return False
            raise RuntimeError('The environment variable %r is not set '
                               'and as such configuration could not be '
                               'loaded. Set this variable and make it '
                               'point to a configuration file' %
                               variable_name)
        return self.from_pyfile(rv, silent=silent)
|
||||
|
||||
def from_pyfile(self, filename, silent=False):
|
||||
"""Updates the values in the config from a Python file. This function
|
||||
behaves as if the file was imported as module with the
|
||||
:meth:`from_object` function.
|
||||
|
||||
:param filename: the filename of the config. This can either be an
|
||||
absolute filename or a filename relative to the
|
||||
root path.
|
||||
:param silent: set to ``True`` if you want silent failure for missing
|
||||
files.
|
||||
|
||||
.. versionadded:: 0.7
|
||||
`silent` parameter.
|
||||
"""
|
||||
filename = os.path.join(self.root_path, filename)
|
||||
d = types.ModuleType('config')
|
||||
d.__file__ = filename
|
||||
try:
|
||||
with open(filename, mode='rb') as config_file:
|
||||
exec(compile(config_file.read(), filename, 'exec'), d.__dict__)
|
||||
except IOError as e:
|
||||
if silent and e.errno in (
|
||||
errno.ENOENT, errno.EISDIR, errno.ENOTDIR
|
||||
):
|
||||
return False
|
||||
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
|
||||
raise
|
||||
self.from_object(d)
|
||||
return True
|
||||
|
||||
def from_object(self, obj):
|
||||
"""Updates the values from the given object. An object can be of one
|
||||
of the following two types:
|
||||
|
||||
- a string: in this case the object with that name will be imported
|
||||
- an actual object reference: that object is used directly
|
||||
|
||||
Objects are usually either modules or classes. :meth:`from_object`
|
||||
loads only the uppercase attributes of the module/class. A ``dict``
|
||||
object will not work with :meth:`from_object` because the keys of a
|
||||
``dict`` are not attributes of the ``dict`` class.
|
||||
|
||||
Example of module-based configuration::
|
||||
|
||||
app.config.from_object('yourapplication.default_config')
|
||||
from yourapplication import default_config
|
||||
app.config.from_object(default_config)
|
||||
|
||||
Nothing is done to the object before loading. If the object is a
|
||||
class and has ``@property`` attributes, it needs to be
|
||||
instantiated before being passed to this method.
|
||||
|
||||
You should not use this function to load the actual configuration but
|
||||
rather configuration defaults. The actual config should be loaded
|
||||
with :meth:`from_pyfile` and ideally from a location not within the
|
||||
package because the package might be installed system wide.
|
||||
|
||||
See :ref:`config-dev-prod` for an example of class-based configuration
|
||||
using :meth:`from_object`.
|
||||
|
||||
:param obj: an import name or object
|
||||
"""
|
||||
if isinstance(obj, string_types):
|
||||
obj = import_string(obj)
|
||||
for key in dir(obj):
|
||||
if key.isupper():
|
||||
self[key] = getattr(obj, key)
|
||||
|
||||
def from_json(self, filename, silent=False):
|
||||
"""Updates the values in the config from a JSON file. This function
|
||||
behaves as if the JSON object was a dictionary and passed to the
|
||||
:meth:`from_mapping` function.
|
||||
|
||||
:param filename: the filename of the JSON file. This can either be an
|
||||
absolute filename or a filename relative to the
|
||||
root path.
|
||||
:param silent: set to ``True`` if you want silent failure for missing
|
||||
files.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
filename = os.path.join(self.root_path, filename)
|
||||
|
||||
try:
|
||||
with open(filename) as json_file:
|
||||
obj = json.loads(json_file.read())
|
||||
except IOError as e:
|
||||
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
|
||||
return False
|
||||
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
|
||||
raise
|
||||
return self.from_mapping(obj)
|
||||
|
||||
def from_mapping(self, *mapping, **kwargs):
|
||||
"""Updates the config like :meth:`update` ignoring items with non-upper
|
||||
keys.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
mappings = []
|
||||
if len(mapping) == 1:
|
||||
if hasattr(mapping[0], 'items'):
|
||||
mappings.append(mapping[0].items())
|
||||
else:
|
||||
mappings.append(mapping[0])
|
||||
elif len(mapping) > 1:
|
||||
raise TypeError(
|
||||
'expected at most 1 positional argument, got %d' % len(mapping)
|
||||
)
|
||||
mappings.append(kwargs.items())
|
||||
for mapping in mappings:
|
||||
for (key, value) in mapping:
|
||||
if key.isupper():
|
||||
self[key] = value
|
||||
return True
|
||||
|
||||
def get_namespace(self, namespace, lowercase=True, trim_namespace=True):
|
||||
"""Returns a dictionary containing a subset of configuration options
|
||||
that match the specified namespace/prefix. Example usage::
|
||||
|
||||
app.config['IMAGE_STORE_TYPE'] = 'fs'
|
||||
app.config['IMAGE_STORE_PATH'] = '/var/app/images'
|
||||
app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com'
|
||||
image_store_config = app.config.get_namespace('IMAGE_STORE_')
|
||||
|
||||
The resulting dictionary `image_store_config` would look like::
|
||||
|
||||
{
|
||||
'type': 'fs',
|
||||
'path': '/var/app/images',
|
||||
'base_url': 'http://img.website.com'
|
||||
}
|
||||
|
||||
This is often useful when configuration options map directly to
|
||||
keyword arguments in functions or class constructors.
|
||||
|
||||
:param namespace: a configuration namespace
|
||||
:param lowercase: a flag indicating if the keys of the resulting
|
||||
dictionary should be lowercase
|
||||
:param trim_namespace: a flag indicating if the keys of the resulting
|
||||
dictionary should not include the namespace
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
rv = {}
|
||||
for k, v in iteritems(self):
|
||||
if not k.startswith(namespace):
|
||||
continue
|
||||
if trim_namespace:
|
||||
key = k[len(namespace):]
|
||||
else:
|
||||
key = k
|
||||
if lowercase:
|
||||
key = key.lower()
|
||||
rv[key] = v
|
||||
return rv
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self))
|
||||
@@ -1,457 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.ctx
|
||||
~~~~~~~~~
|
||||
|
||||
Implements the objects required to keep the context.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from functools import update_wrapper
|
||||
|
||||
from werkzeug.exceptions import HTTPException
|
||||
|
||||
from .globals import _request_ctx_stack, _app_ctx_stack
|
||||
from .signals import appcontext_pushed, appcontext_popped
|
||||
from ._compat import BROKEN_PYPY_CTXMGR_EXIT, reraise
|
||||
|
||||
|
||||
# a singleton sentinel value for parameter defaults
|
||||
_sentinel = object()
|
||||
|
||||
|
||||
class _AppCtxGlobals(object):
|
||||
"""A plain object. Used as a namespace for storing data during an
|
||||
application context.
|
||||
|
||||
Creating an app context automatically creates this object, which is
|
||||
made available as the :data:`g` proxy.
|
||||
|
||||
.. describe:: 'key' in g
|
||||
|
||||
Check whether an attribute is present.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
|
||||
.. describe:: iter(g)
|
||||
|
||||
Return an iterator over the attribute names.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
"""
|
||||
|
||||
def get(self, name, default=None):
|
||||
"""Get an attribute by name, or a default value. Like
|
||||
:meth:`dict.get`.
|
||||
|
||||
:param name: Name of attribute to get.
|
||||
:param default: Value to return if the attribute is not present.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
"""
|
||||
return self.__dict__.get(name, default)
|
||||
|
||||
def pop(self, name, default=_sentinel):
|
||||
"""Get and remove an attribute by name. Like :meth:`dict.pop`.
|
||||
|
||||
:param name: Name of attribute to pop.
|
||||
:param default: Value to return if the attribute is not present,
|
||||
instead of raise a ``KeyError``.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
if default is _sentinel:
|
||||
return self.__dict__.pop(name)
|
||||
else:
|
||||
return self.__dict__.pop(name, default)
|
||||
|
||||
def setdefault(self, name, default=None):
|
||||
"""Get the value of an attribute if it is present, otherwise
|
||||
set and return a default value. Like :meth:`dict.setdefault`.
|
||||
|
||||
:param name: Name of attribute to get.
|
||||
:param: default: Value to set and return if the attribute is not
|
||||
present.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
return self.__dict__.setdefault(name, default)
|
||||
|
||||
def __contains__(self, item):
|
||||
return item in self.__dict__
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__dict__)
|
||||
|
||||
def __repr__(self):
|
||||
top = _app_ctx_stack.top
|
||||
if top is not None:
|
||||
return '<flask.g of %r>' % top.app.name
|
||||
return object.__repr__(self)
|
||||
|
||||
|
||||
def after_this_request(f):
|
||||
"""Executes a function after this request. This is useful to modify
|
||||
response objects. The function is passed the response object and has
|
||||
to return the same or a new one.
|
||||
|
||||
Example::
|
||||
|
||||
@app.route('/')
|
||||
def index():
|
||||
@after_this_request
|
||||
def add_header(response):
|
||||
response.headers['X-Foo'] = 'Parachute'
|
||||
return response
|
||||
return 'Hello World!'
|
||||
|
||||
This is more useful if a function other than the view function wants to
|
||||
modify a response. For instance think of a decorator that wants to add
|
||||
some headers without converting the return value into a response object.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
_request_ctx_stack.top._after_request_functions.append(f)
|
||||
return f
|
||||
|
||||
|
||||
def copy_current_request_context(f):
|
||||
"""A helper function that decorates a function to retain the current
|
||||
request context. This is useful when working with greenlets. The moment
|
||||
the function is decorated a copy of the request context is created and
|
||||
then pushed when the function is called.
|
||||
|
||||
Example::
|
||||
|
||||
import gevent
|
||||
from flask import copy_current_request_context
|
||||
|
||||
@app.route('/')
|
||||
def index():
|
||||
@copy_current_request_context
|
||||
def do_some_work():
|
||||
# do some work here, it can access flask.request like you
|
||||
# would otherwise in the view function.
|
||||
...
|
||||
gevent.spawn(do_some_work)
|
||||
return 'Regular response'
|
||||
|
||||
.. versionadded:: 0.10
|
||||
"""
|
||||
top = _request_ctx_stack.top
|
||||
if top is None:
|
||||
raise RuntimeError('This decorator can only be used at local scopes '
|
||||
'when a request context is on the stack. For instance within '
|
||||
'view functions.')
|
||||
reqctx = top.copy()
|
||||
def wrapper(*args, **kwargs):
|
||||
with reqctx:
|
||||
return f(*args, **kwargs)
|
||||
return update_wrapper(wrapper, f)
|
||||
|
||||
|
||||
def has_request_context():
|
||||
"""If you have code that wants to test if a request context is there or
|
||||
not this function can be used. For instance, you may want to take advantage
|
||||
of request information if the request object is available, but fail
|
||||
silently if it is unavailable.
|
||||
|
||||
::
|
||||
|
||||
class User(db.Model):
|
||||
|
||||
def __init__(self, username, remote_addr=None):
|
||||
self.username = username
|
||||
if remote_addr is None and has_request_context():
|
||||
remote_addr = request.remote_addr
|
||||
self.remote_addr = remote_addr
|
||||
|
||||
Alternatively you can also just test any of the context bound objects
|
||||
(such as :class:`request` or :class:`g`) for truthness::
|
||||
|
||||
class User(db.Model):
|
||||
|
||||
def __init__(self, username, remote_addr=None):
|
||||
self.username = username
|
||||
if remote_addr is None and request:
|
||||
remote_addr = request.remote_addr
|
||||
self.remote_addr = remote_addr
|
||||
|
||||
.. versionadded:: 0.7
|
||||
"""
|
||||
return _request_ctx_stack.top is not None
|
||||
|
||||
|
||||
def has_app_context():
|
||||
"""Works like :func:`has_request_context` but for the application
|
||||
context. You can also just do a boolean check on the
|
||||
:data:`current_app` object instead.
|
||||
|
||||
.. versionadded:: 0.9
|
||||
"""
|
||||
return _app_ctx_stack.top is not None
|
||||
|
||||
|
||||
class AppContext(object):
|
||||
"""The application context binds an application object implicitly
|
||||
to the current thread or greenlet, similar to how the
|
||||
:class:`RequestContext` binds request information. The application
|
||||
context is also implicitly created if a request context is created
|
||||
but the application is not on top of the individual application
|
||||
context.
|
||||
"""
|
||||
|
||||
def __init__(self, app):
|
||||
self.app = app
|
||||
self.url_adapter = app.create_url_adapter(None)
|
||||
self.g = app.app_ctx_globals_class()
|
||||
|
||||
# Like request context, app contexts can be pushed multiple times
|
||||
# but there a basic "refcount" is enough to track them.
|
||||
self._refcnt = 0
|
||||
|
||||
def push(self):
|
||||
"""Binds the app context to the current context."""
|
||||
self._refcnt += 1
|
||||
if hasattr(sys, 'exc_clear'):
|
||||
sys.exc_clear()
|
||||
_app_ctx_stack.push(self)
|
||||
appcontext_pushed.send(self.app)
|
||||
|
||||
def pop(self, exc=_sentinel):
|
||||
"""Pops the app context."""
|
||||
try:
|
||||
self._refcnt -= 1
|
||||
if self._refcnt <= 0:
|
||||
if exc is _sentinel:
|
||||
exc = sys.exc_info()[1]
|
||||
self.app.do_teardown_appcontext(exc)
|
||||
finally:
|
||||
rv = _app_ctx_stack.pop()
|
||||
assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
|
||||
% (rv, self)
|
||||
appcontext_popped.send(self.app)
|
||||
|
||||
def __enter__(self):
|
||||
self.push()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
self.pop(exc_value)
|
||||
|
||||
if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
|
||||
reraise(exc_type, exc_value, tb)
|
||||
|
||||
|
||||
class RequestContext(object):
|
||||
"""The request context contains all request relevant information. It is
|
||||
created at the beginning of the request and pushed to the
|
||||
`_request_ctx_stack` and removed at the end of it. It will create the
|
||||
URL adapter and request object for the WSGI environment provided.
|
||||
|
||||
Do not attempt to use this class directly, instead use
|
||||
:meth:`~flask.Flask.test_request_context` and
|
||||
:meth:`~flask.Flask.request_context` to create this object.
|
||||
|
||||
When the request context is popped, it will evaluate all the
|
||||
functions registered on the application for teardown execution
|
||||
(:meth:`~flask.Flask.teardown_request`).
|
||||
|
||||
The request context is automatically popped at the end of the request
|
||||
for you. In debug mode the request context is kept around if
|
||||
exceptions happen so that interactive debuggers have a chance to
|
||||
introspect the data. With 0.4 this can also be forced for requests
|
||||
that did not fail and outside of ``DEBUG`` mode. By setting
|
||||
``'flask._preserve_context'`` to ``True`` on the WSGI environment the
|
||||
context will not pop itself at the end of the request. This is used by
|
||||
the :meth:`~flask.Flask.test_client` for example to implement the
|
||||
deferred cleanup functionality.
|
||||
|
||||
You might find this helpful for unittests where you need the
|
||||
information from the context local around for a little longer. Make
|
||||
sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in
|
||||
that situation, otherwise your unittests will leak memory.
|
||||
"""
|
||||
|
||||
def __init__(self, app, environ, request=None):
|
||||
self.app = app
|
||||
if request is None:
|
||||
request = app.request_class(environ)
|
||||
self.request = request
|
||||
self.url_adapter = app.create_url_adapter(self.request)
|
||||
self.flashes = None
|
||||
self.session = None
|
||||
|
||||
# Request contexts can be pushed multiple times and interleaved with
|
||||
# other request contexts. Now only if the last level is popped we
|
||||
# get rid of them. Additionally if an application context is missing
|
||||
# one is created implicitly so for each level we add this information
|
||||
self._implicit_app_ctx_stack = []
|
||||
|
||||
# indicator if the context was preserved. Next time another context
|
||||
# is pushed the preserved context is popped.
|
||||
self.preserved = False
|
||||
|
||||
# remembers the exception for pop if there is one in case the context
|
||||
# preservation kicks in.
|
||||
self._preserved_exc = None
|
||||
|
||||
# Functions that should be executed after the request on the response
|
||||
# object. These will be called before the regular "after_request"
|
||||
# functions.
|
||||
self._after_request_functions = []
|
||||
|
||||
self.match_request()
|
||||
|
||||
def _get_g(self):
|
||||
return _app_ctx_stack.top.g
|
||||
def _set_g(self, value):
|
||||
_app_ctx_stack.top.g = value
|
||||
g = property(_get_g, _set_g)
|
||||
del _get_g, _set_g
|
||||
|
||||
def copy(self):
|
||||
"""Creates a copy of this request context with the same request object.
|
||||
This can be used to move a request context to a different greenlet.
|
||||
Because the actual request object is the same this cannot be used to
|
||||
move a request context to a different thread unless access to the
|
||||
request object is locked.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
"""
|
||||
return self.__class__(self.app,
|
||||
environ=self.request.environ,
|
||||
request=self.request
|
||||
)
|
||||
|
||||
def match_request(self):
|
||||
"""Can be overridden by a subclass to hook into the matching
|
||||
of the request.
|
||||
"""
|
||||
try:
|
||||
url_rule, self.request.view_args = \
|
||||
self.url_adapter.match(return_rule=True)
|
||||
self.request.url_rule = url_rule
|
||||
except HTTPException as e:
|
||||
self.request.routing_exception = e
|
||||
|
||||
def push(self):
|
||||
"""Binds the request context to the current context."""
|
||||
# If an exception occurs in debug mode or if context preservation is
|
||||
# activated under exception situations exactly one context stays
|
||||
# on the stack. The rationale is that you want to access that
|
||||
# information under debug situations. However if someone forgets to
|
||||
# pop that context again we want to make sure that on the next push
|
||||
# it's invalidated, otherwise we run at risk that something leaks
|
||||
# memory. This is usually only a problem in test suite since this
|
||||
# functionality is not active in production environments.
|
||||
top = _request_ctx_stack.top
|
||||
if top is not None and top.preserved:
|
||||
top.pop(top._preserved_exc)
|
||||
|
||||
# Before we push the request context we have to ensure that there
|
||||
# is an application context.
|
||||
app_ctx = _app_ctx_stack.top
|
||||
if app_ctx is None or app_ctx.app != self.app:
|
||||
app_ctx = self.app.app_context()
|
||||
app_ctx.push()
|
||||
self._implicit_app_ctx_stack.append(app_ctx)
|
||||
else:
|
||||
self._implicit_app_ctx_stack.append(None)
|
||||
|
||||
if hasattr(sys, 'exc_clear'):
|
||||
sys.exc_clear()
|
||||
|
||||
_request_ctx_stack.push(self)
|
||||
|
||||
# Open the session at the moment that the request context is available.
|
||||
# This allows a custom open_session method to use the request context.
|
||||
# Only open a new session if this is the first time the request was
|
||||
# pushed, otherwise stream_with_context loses the session.
|
||||
if self.session is None:
|
||||
session_interface = self.app.session_interface
|
||||
self.session = session_interface.open_session(
|
||||
self.app, self.request
|
||||
)
|
||||
|
||||
if self.session is None:
|
||||
self.session = session_interface.make_null_session(self.app)
|
||||
|
||||
def pop(self, exc=_sentinel):
|
||||
"""Pops the request context and unbinds it by doing that. This will
|
||||
also trigger the execution of functions registered by the
|
||||
:meth:`~flask.Flask.teardown_request` decorator.
|
||||
|
||||
.. versionchanged:: 0.9
|
||||
Added the `exc` argument.
|
||||
"""
|
||||
app_ctx = self._implicit_app_ctx_stack.pop()
|
||||
|
||||
try:
|
||||
clear_request = False
|
||||
if not self._implicit_app_ctx_stack:
|
||||
self.preserved = False
|
||||
self._preserved_exc = None
|
||||
if exc is _sentinel:
|
||||
exc = sys.exc_info()[1]
|
||||
self.app.do_teardown_request(exc)
|
||||
|
||||
# If this interpreter supports clearing the exception information
|
||||
# we do that now. This will only go into effect on Python 2.x,
|
||||
# on 3.x it disappears automatically at the end of the exception
|
||||
# stack.
|
||||
if hasattr(sys, 'exc_clear'):
|
||||
sys.exc_clear()
|
||||
|
||||
request_close = getattr(self.request, 'close', None)
|
||||
if request_close is not None:
|
||||
request_close()
|
||||
clear_request = True
|
||||
finally:
|
||||
rv = _request_ctx_stack.pop()
|
||||
|
||||
# get rid of circular dependencies at the end of the request
|
||||
# so that we don't require the GC to be active.
|
||||
if clear_request:
|
||||
rv.request.environ['werkzeug.request'] = None
|
||||
|
||||
# Get rid of the app as well if necessary.
|
||||
if app_ctx is not None:
|
||||
app_ctx.pop(exc)
|
||||
|
||||
assert rv is self, 'Popped wrong request context. ' \
|
||||
'(%r instead of %r)' % (rv, self)
|
||||
|
||||
def auto_pop(self, exc):
|
||||
if self.request.environ.get('flask._preserve_context') or \
|
||||
(exc is not None and self.app.preserve_context_on_exception):
|
||||
self.preserved = True
|
||||
self._preserved_exc = exc
|
||||
else:
|
||||
self.pop(exc)
|
||||
|
||||
def __enter__(self):
|
||||
self.push()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
# do not pop the request stack if we are in debug mode and an
|
||||
# exception happened. This will allow the debugger to still
|
||||
# access the request object in the interactive shell. Furthermore
|
||||
# the context can be force kept alive for the test client.
|
||||
# See flask.testing for how this works.
|
||||
self.auto_pop(exc_value)
|
||||
|
||||
if BROKEN_PYPY_CTXMGR_EXIT and exc_type is not None:
|
||||
reraise(exc_type, exc_value, tb)
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s \'%s\' [%s] of %s>' % (
|
||||
self.__class__.__name__,
|
||||
self.request.url,
|
||||
self.request.method,
|
||||
self.app.name,
|
||||
)
|
||||
@@ -1,168 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.debughelpers
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Various helpers to make the development experience better.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
import os
|
||||
from warnings import warn
|
||||
|
||||
from ._compat import implements_to_string, text_type
|
||||
from .app import Flask
|
||||
from .blueprints import Blueprint
|
||||
from .globals import _request_ctx_stack
|
||||
|
||||
|
||||
class UnexpectedUnicodeError(AssertionError, UnicodeError):
|
||||
"""Raised in places where we want some better error reporting for
|
||||
unexpected unicode or binary data.
|
||||
"""
|
||||
|
||||
|
||||
@implements_to_string
|
||||
class DebugFilesKeyError(KeyError, AssertionError):
|
||||
"""Raised from request.files during debugging. The idea is that it can
|
||||
provide a better error message than just a generic KeyError/BadRequest.
|
||||
"""
|
||||
|
||||
def __init__(self, request, key):
|
||||
form_matches = request.form.getlist(key)
|
||||
buf = ['You tried to access the file "%s" in the request.files '
|
||||
'dictionary but it does not exist. The mimetype for the request '
|
||||
'is "%s" instead of "multipart/form-data" which means that no '
|
||||
'file contents were transmitted. To fix this error you should '
|
||||
'provide enctype="multipart/form-data" in your form.' %
|
||||
(key, request.mimetype)]
|
||||
if form_matches:
|
||||
buf.append('\n\nThe browser instead transmitted some file names. '
|
||||
'This was submitted: %s' % ', '.join('"%s"' % x
|
||||
for x in form_matches))
|
||||
self.msg = ''.join(buf)
|
||||
|
||||
def __str__(self):
|
||||
return self.msg
|
||||
|
||||
|
||||
class FormDataRoutingRedirect(AssertionError):
|
||||
"""This exception is raised by Flask in debug mode if it detects a
|
||||
redirect caused by the routing system when the request method is not
|
||||
GET, HEAD or OPTIONS. Reasoning: form data will be dropped.
|
||||
"""
|
||||
|
||||
def __init__(self, request):
|
||||
exc = request.routing_exception
|
||||
buf = ['A request was sent to this URL (%s) but a redirect was '
|
||||
'issued automatically by the routing system to "%s".'
|
||||
% (request.url, exc.new_url)]
|
||||
|
||||
# In case just a slash was appended we can be extra helpful
|
||||
if request.base_url + '/' == exc.new_url.split('?')[0]:
|
||||
buf.append(' The URL was defined with a trailing slash so '
|
||||
'Flask will automatically redirect to the URL '
|
||||
'with the trailing slash if it was accessed '
|
||||
'without one.')
|
||||
|
||||
buf.append(' Make sure to directly send your %s-request to this URL '
|
||||
'since we can\'t make browsers or HTTP clients redirect '
|
||||
'with form data reliably or without user interaction.' %
|
||||
request.method)
|
||||
buf.append('\n\nNote: this exception is only raised in debug mode')
|
||||
AssertionError.__init__(self, ''.join(buf).encode('utf-8'))
|
||||
|
||||
|
||||
def attach_enctype_error_multidict(request):
|
||||
"""Since Flask 0.8 we're monkeypatching the files object in case a
|
||||
request is detected that does not use multipart form data but the files
|
||||
object is accessed.
|
||||
"""
|
||||
oldcls = request.files.__class__
|
||||
class newcls(oldcls):
|
||||
def __getitem__(self, key):
|
||||
try:
|
||||
return oldcls.__getitem__(self, key)
|
||||
except KeyError:
|
||||
if key not in request.form:
|
||||
raise
|
||||
raise DebugFilesKeyError(request, key)
|
||||
newcls.__name__ = oldcls.__name__
|
||||
newcls.__module__ = oldcls.__module__
|
||||
request.files.__class__ = newcls
|
||||
|
||||
|
||||
def _dump_loader_info(loader):
|
||||
yield 'class: %s.%s' % (type(loader).__module__, type(loader).__name__)
|
||||
for key, value in sorted(loader.__dict__.items()):
|
||||
if key.startswith('_'):
|
||||
continue
|
||||
if isinstance(value, (tuple, list)):
|
||||
if not all(isinstance(x, (str, text_type)) for x in value):
|
||||
continue
|
||||
yield '%s:' % key
|
||||
for item in value:
|
||||
yield ' - %s' % item
|
||||
continue
|
||||
elif not isinstance(value, (str, text_type, int, float, bool)):
|
||||
continue
|
||||
yield '%s: %r' % (key, value)
|
||||
|
||||
|
||||
def explain_template_loading_attempts(app, template, attempts):
|
||||
"""This should help developers understand what failed"""
|
||||
info = ['Locating template "%s":' % template]
|
||||
total_found = 0
|
||||
blueprint = None
|
||||
reqctx = _request_ctx_stack.top
|
||||
if reqctx is not None and reqctx.request.blueprint is not None:
|
||||
blueprint = reqctx.request.blueprint
|
||||
|
||||
for idx, (loader, srcobj, triple) in enumerate(attempts):
|
||||
if isinstance(srcobj, Flask):
|
||||
src_info = 'application "%s"' % srcobj.import_name
|
||||
elif isinstance(srcobj, Blueprint):
|
||||
src_info = 'blueprint "%s" (%s)' % (srcobj.name,
|
||||
srcobj.import_name)
|
||||
else:
|
||||
src_info = repr(srcobj)
|
||||
|
||||
info.append('% 5d: trying loader of %s' % (
|
||||
idx + 1, src_info))
|
||||
|
||||
for line in _dump_loader_info(loader):
|
||||
info.append(' %s' % line)
|
||||
|
||||
if triple is None:
|
||||
detail = 'no match'
|
||||
else:
|
||||
detail = 'found (%r)' % (triple[1] or '<string>')
|
||||
total_found += 1
|
||||
info.append(' -> %s' % detail)
|
||||
|
||||
seems_fishy = False
|
||||
if total_found == 0:
|
||||
info.append('Error: the template could not be found.')
|
||||
seems_fishy = True
|
||||
elif total_found > 1:
|
||||
info.append('Warning: multiple loaders returned a match for the template.')
|
||||
seems_fishy = True
|
||||
|
||||
if blueprint is not None and seems_fishy:
|
||||
info.append(' The template was looked up from an endpoint that '
|
||||
'belongs to the blueprint "%s".' % blueprint)
|
||||
info.append(' Maybe you did not place a template in the right folder?')
|
||||
info.append(' See http://flask.pocoo.org/docs/blueprints/#templates')
|
||||
|
||||
app.logger.info('\n'.join(info))
|
||||
|
||||
|
||||
def explain_ignored_app_run():
|
||||
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
|
||||
warn(Warning('Silently ignoring app.run() because the '
|
||||
'application is run from the flask command line '
|
||||
'executable. Consider putting app.run() behind an '
|
||||
'if __name__ == "__main__" guard to silence this '
|
||||
'warning.'), stacklevel=3)
|
||||
@@ -1,61 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.globals
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
Defines all the global objects that are proxies to the current
|
||||
active context.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
from functools import partial
|
||||
from werkzeug.local import LocalStack, LocalProxy
|
||||
|
||||
|
||||
_request_ctx_err_msg = '''\
|
||||
Working outside of request context.
|
||||
|
||||
This typically means that you attempted to use functionality that needed
|
||||
an active HTTP request. Consult the documentation on testing for
|
||||
information about how to avoid this problem.\
|
||||
'''
|
||||
_app_ctx_err_msg = '''\
|
||||
Working outside of application context.
|
||||
|
||||
This typically means that you attempted to use functionality that needed
|
||||
to interface with the current application object in some way. To solve
|
||||
this, set up an application context with app.app_context(). See the
|
||||
documentation for more information.\
|
||||
'''
|
||||
|
||||
|
||||
def _lookup_req_object(name):
|
||||
top = _request_ctx_stack.top
|
||||
if top is None:
|
||||
raise RuntimeError(_request_ctx_err_msg)
|
||||
return getattr(top, name)
|
||||
|
||||
|
||||
def _lookup_app_object(name):
|
||||
top = _app_ctx_stack.top
|
||||
if top is None:
|
||||
raise RuntimeError(_app_ctx_err_msg)
|
||||
return getattr(top, name)
|
||||
|
||||
|
||||
def _find_app():
|
||||
top = _app_ctx_stack.top
|
||||
if top is None:
|
||||
raise RuntimeError(_app_ctx_err_msg)
|
||||
return top.app
|
||||
|
||||
|
||||
# context locals
|
||||
_request_ctx_stack = LocalStack()
|
||||
_app_ctx_stack = LocalStack()
|
||||
current_app = LocalProxy(_find_app)
|
||||
request = LocalProxy(partial(_lookup_req_object, 'request'))
|
||||
session = LocalProxy(partial(_lookup_req_object, 'session'))
|
||||
g = LocalProxy(partial(_lookup_app_object, 'g'))
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,357 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.json
|
||||
~~~~~~~~~~
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
import codecs
|
||||
import io
|
||||
import uuid
|
||||
from datetime import date, datetime
|
||||
from flask.globals import current_app, request
|
||||
from flask._compat import text_type, PY2
|
||||
|
||||
from werkzeug.http import http_date
|
||||
from jinja2 import Markup
|
||||
|
||||
# Use the same json implementation as itsdangerous on which we
|
||||
# depend anyways.
|
||||
from itsdangerous import json as _json
|
||||
|
||||
|
||||
# Figure out if simplejson escapes slashes. This behavior was changed
|
||||
# from one version to another without reason.
|
||||
_slash_escape = '\\/' not in _json.dumps('/')
|
||||
|
||||
|
||||
__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump',
|
||||
'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder',
|
||||
'jsonify']
|
||||
|
||||
|
||||
def _wrap_reader_for_text(fp, encoding):
|
||||
if isinstance(fp.read(0), bytes):
|
||||
fp = io.TextIOWrapper(io.BufferedReader(fp), encoding)
|
||||
return fp
|
||||
|
||||
|
||||
def _wrap_writer_for_text(fp, encoding):
|
||||
try:
|
||||
fp.write('')
|
||||
except TypeError:
|
||||
fp = io.TextIOWrapper(fp, encoding)
|
||||
return fp
|
||||
|
||||
|
||||
class JSONEncoder(_json.JSONEncoder):
|
||||
"""The default Flask JSON encoder. This one extends the default simplejson
|
||||
encoder by also supporting ``datetime`` objects, ``UUID`` as well as
|
||||
``Markup`` objects which are serialized as RFC 822 datetime strings (same
|
||||
as the HTTP date format). In order to support more data types override the
|
||||
:meth:`default` method.
|
||||
"""
|
||||
|
||||
def default(self, o):
|
||||
"""Implement this method in a subclass such that it returns a
|
||||
serializable object for ``o``, or calls the base implementation (to
|
||||
raise a :exc:`TypeError`).
|
||||
|
||||
For example, to support arbitrary iterators, you could implement
|
||||
default like this::
|
||||
|
||||
def default(self, o):
|
||||
try:
|
||||
iterable = iter(o)
|
||||
except TypeError:
|
||||
pass
|
||||
else:
|
||||
return list(iterable)
|
||||
return JSONEncoder.default(self, o)
|
||||
"""
|
||||
if isinstance(o, datetime):
|
||||
return http_date(o.utctimetuple())
|
||||
if isinstance(o, date):
|
||||
return http_date(o.timetuple())
|
||||
if isinstance(o, uuid.UUID):
|
||||
return str(o)
|
||||
if hasattr(o, '__html__'):
|
||||
return text_type(o.__html__())
|
||||
return _json.JSONEncoder.default(self, o)
|
||||
|
||||
|
||||
class JSONDecoder(_json.JSONDecoder):
|
||||
"""The default JSON decoder. This one does not change the behavior from
|
||||
the default simplejson decoder. Consult the :mod:`json` documentation
|
||||
for more information. This decoder is not only used for the load
|
||||
functions of this module but also :attr:`~flask.Request`.
|
||||
"""
|
||||
|
||||
|
||||
def _dump_arg_defaults(kwargs, app=None):
|
||||
"""Inject default arguments for dump functions."""
|
||||
if app is None:
|
||||
app = current_app
|
||||
|
||||
if app:
|
||||
bp = app.blueprints.get(request.blueprint) if request else None
|
||||
kwargs.setdefault(
|
||||
'cls', bp.json_encoder if bp and bp.json_encoder else app.json_encoder
|
||||
)
|
||||
|
||||
if not app.config['JSON_AS_ASCII']:
|
||||
kwargs.setdefault('ensure_ascii', False)
|
||||
|
||||
kwargs.setdefault('sort_keys', app.config['JSON_SORT_KEYS'])
|
||||
else:
|
||||
kwargs.setdefault('sort_keys', True)
|
||||
kwargs.setdefault('cls', JSONEncoder)
|
||||
|
||||
|
||||
def _load_arg_defaults(kwargs, app=None):
|
||||
"""Inject default arguments for load functions."""
|
||||
if app is None:
|
||||
app = current_app
|
||||
|
||||
if app:
|
||||
bp = app.blueprints.get(request.blueprint) if request else None
|
||||
kwargs.setdefault(
|
||||
'cls',
|
||||
bp.json_decoder if bp and bp.json_decoder
|
||||
else app.json_decoder
|
||||
)
|
||||
else:
|
||||
kwargs.setdefault('cls', JSONDecoder)
|
||||
|
||||
|
||||
def detect_encoding(data):
|
||||
"""Detect which UTF codec was used to encode the given bytes.
|
||||
|
||||
The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is
|
||||
accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big
|
||||
or little endian. Some editors or libraries may prepend a BOM.
|
||||
|
||||
:param data: Bytes in unknown UTF encoding.
|
||||
:return: UTF encoding name
|
||||
"""
|
||||
head = data[:4]
|
||||
|
||||
if head[:3] == codecs.BOM_UTF8:
|
||||
return 'utf-8-sig'
|
||||
|
||||
if b'\x00' not in head:
|
||||
return 'utf-8'
|
||||
|
||||
if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE):
|
||||
return 'utf-32'
|
||||
|
||||
if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE):
|
||||
return 'utf-16'
|
||||
|
||||
if len(head) == 4:
|
||||
if head[:3] == b'\x00\x00\x00':
|
||||
return 'utf-32-be'
|
||||
|
||||
if head[::2] == b'\x00\x00':
|
||||
return 'utf-16-be'
|
||||
|
||||
if head[1:] == b'\x00\x00\x00':
|
||||
return 'utf-32-le'
|
||||
|
||||
if head[1::2] == b'\x00\x00':
|
||||
return 'utf-16-le'
|
||||
|
||||
if len(head) == 2:
|
||||
return 'utf-16-be' if head.startswith(b'\x00') else 'utf-16-le'
|
||||
|
||||
return 'utf-8'
|
||||
|
||||
|
||||
def dumps(obj, app=None, **kwargs):
|
||||
"""Serialize ``obj`` to a JSON-formatted string. If there is an
|
||||
app context pushed, use the current app's configured encoder
|
||||
(:attr:`~flask.Flask.json_encoder`), or fall back to the default
|
||||
:class:`JSONEncoder`.
|
||||
|
||||
Takes the same arguments as the built-in :func:`json.dumps`, and
|
||||
does some extra configuration based on the application. If the
|
||||
simplejson package is installed, it is preferred.
|
||||
|
||||
:param obj: Object to serialize to JSON.
|
||||
:param app: App instance to use to configure the JSON encoder.
|
||||
Uses ``current_app`` if not given, and falls back to the default
|
||||
encoder when not in an app context.
|
||||
:param kwargs: Extra arguments passed to :func:`json.dumps`.
|
||||
|
||||
.. versionchanged:: 1.0.3
|
||||
|
||||
``app`` can be passed directly, rather than requiring an app
|
||||
context for configuration.
|
||||
"""
|
||||
_dump_arg_defaults(kwargs, app=app)
|
||||
encoding = kwargs.pop('encoding', None)
|
||||
rv = _json.dumps(obj, **kwargs)
|
||||
if encoding is not None and isinstance(rv, text_type):
|
||||
rv = rv.encode(encoding)
|
||||
return rv
|
||||
|
||||
|
||||
def dump(obj, fp, app=None, **kwargs):
|
||||
"""Like :func:`dumps` but writes into a file object."""
|
||||
_dump_arg_defaults(kwargs, app=app)
|
||||
encoding = kwargs.pop('encoding', None)
|
||||
if encoding is not None:
|
||||
fp = _wrap_writer_for_text(fp, encoding)
|
||||
_json.dump(obj, fp, **kwargs)
|
||||
|
||||
|
||||
def loads(s, app=None, **kwargs):
|
||||
"""Deserialize an object from a JSON-formatted string ``s``. If
|
||||
there is an app context pushed, use the current app's configured
|
||||
decoder (:attr:`~flask.Flask.json_decoder`), or fall back to the
|
||||
default :class:`JSONDecoder`.
|
||||
|
||||
Takes the same arguments as the built-in :func:`json.loads`, and
|
||||
does some extra configuration based on the application. If the
|
||||
simplejson package is installed, it is preferred.
|
||||
|
||||
:param s: JSON string to deserialize.
|
||||
:param app: App instance to use to configure the JSON decoder.
|
||||
Uses ``current_app`` if not given, and falls back to the default
|
||||
encoder when not in an app context.
|
||||
:param kwargs: Extra arguments passed to :func:`json.dumps`.
|
||||
|
||||
.. versionchanged:: 1.0.3
|
||||
|
||||
``app`` can be passed directly, rather than requiring an app
|
||||
context for configuration.
|
||||
"""
|
||||
_load_arg_defaults(kwargs, app=app)
|
||||
if isinstance(s, bytes):
|
||||
encoding = kwargs.pop('encoding', None)
|
||||
if encoding is None:
|
||||
encoding = detect_encoding(s)
|
||||
s = s.decode(encoding)
|
||||
return _json.loads(s, **kwargs)
|
||||
|
||||
|
||||
def load(fp, app=None, **kwargs):
|
||||
"""Like :func:`loads` but reads from a file object."""
|
||||
_load_arg_defaults(kwargs, app=app)
|
||||
if not PY2:
|
||||
fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8')
|
||||
return _json.load(fp, **kwargs)
|
||||
|
||||
|
||||
def htmlsafe_dumps(obj, **kwargs):
|
||||
"""Works exactly like :func:`dumps` but is safe for use in ``<script>``
|
||||
tags. It accepts the same arguments and returns a JSON string. Note that
|
||||
this is available in templates through the ``|tojson`` filter which will
|
||||
also mark the result as safe. Due to how this function escapes certain
|
||||
characters this is safe even if used outside of ``<script>`` tags.
|
||||
|
||||
The following characters are escaped in strings:
|
||||
|
||||
- ``<``
|
||||
- ``>``
|
||||
- ``&``
|
||||
- ``'``
|
||||
|
||||
This makes it safe to embed such strings in any place in HTML with the
|
||||
notable exception of double quoted attributes. In that case single
|
||||
quote your attributes or HTML escape it in addition.
|
||||
|
||||
.. versionchanged:: 0.10
|
||||
This function's return value is now always safe for HTML usage, even
|
||||
if outside of script tags or if used in XHTML. This rule does not
|
||||
hold true when using this function in HTML attributes that are double
|
||||
quoted. Always single quote attributes if you use the ``|tojson``
|
||||
filter. Alternatively use ``|tojson|forceescape``.
|
||||
"""
|
||||
rv = dumps(obj, **kwargs) \
|
||||
.replace(u'<', u'\\u003c') \
|
||||
.replace(u'>', u'\\u003e') \
|
||||
.replace(u'&', u'\\u0026') \
|
||||
.replace(u"'", u'\\u0027')
|
||||
if not _slash_escape:
|
||||
rv = rv.replace('\\/', '/')
|
||||
return rv
|
||||
|
||||
|
||||
def htmlsafe_dump(obj, fp, **kwargs):
|
||||
"""Like :func:`htmlsafe_dumps` but writes into a file object."""
|
||||
fp.write(text_type(htmlsafe_dumps(obj, **kwargs)))
|
||||
|
||||
|
||||
def jsonify(*args, **kwargs):
|
||||
"""This function wraps :func:`dumps` to add a few enhancements that make
|
||||
life easier. It turns the JSON output into a :class:`~flask.Response`
|
||||
object with the :mimetype:`application/json` mimetype. For convenience, it
|
||||
also converts multiple arguments into an array or multiple keyword arguments
|
||||
into a dict. This means that both ``jsonify(1,2,3)`` and
|
||||
``jsonify([1,2,3])`` serialize to ``[1,2,3]``.
|
||||
|
||||
For clarity, the JSON serialization behavior has the following differences
|
||||
from :func:`dumps`:
|
||||
|
||||
1. Single argument: Passed straight through to :func:`dumps`.
|
||||
2. Multiple arguments: Converted to an array before being passed to
|
||||
:func:`dumps`.
|
||||
3. Multiple keyword arguments: Converted to a dict before being passed to
|
||||
:func:`dumps`.
|
||||
4. Both args and kwargs: Behavior undefined and will throw an exception.
|
||||
|
||||
Example usage::
|
||||
|
||||
from flask import jsonify
|
||||
|
||||
@app.route('/_get_current_user')
|
||||
def get_current_user():
|
||||
return jsonify(username=g.user.username,
|
||||
email=g.user.email,
|
||||
id=g.user.id)
|
||||
|
||||
This will send a JSON response like this to the browser::
|
||||
|
||||
{
|
||||
"username": "admin",
|
||||
"email": "admin@localhost",
|
||||
"id": 42
|
||||
}
|
||||
|
||||
|
||||
.. versionchanged:: 0.11
|
||||
Added support for serializing top-level arrays. This introduces a
|
||||
security risk in ancient browsers. See :ref:`json-security` for details.
|
||||
|
||||
This function's response will be pretty printed if the
|
||||
``JSONIFY_PRETTYPRINT_REGULAR`` config parameter is set to True or the
|
||||
Flask app is running in debug mode. Compressed (not pretty) formatting
|
||||
currently means no indents and no spaces after separators.
|
||||
|
||||
.. versionadded:: 0.2
|
||||
"""
|
||||
|
||||
indent = None
|
||||
separators = (',', ':')
|
||||
|
||||
if current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] or current_app.debug:
|
||||
indent = 2
|
||||
separators = (', ', ': ')
|
||||
|
||||
if args and kwargs:
|
||||
raise TypeError('jsonify() behavior undefined when passed both args and kwargs')
|
||||
elif len(args) == 1: # single args are passed directly to dumps()
|
||||
data = args[0]
|
||||
else:
|
||||
data = args or kwargs
|
||||
|
||||
return current_app.response_class(
|
||||
dumps(data, indent=indent, separators=separators) + '\n',
|
||||
mimetype=current_app.config['JSONIFY_MIMETYPE']
|
||||
)
|
||||
|
||||
|
||||
def tojson_filter(obj, **kwargs):
|
||||
return Markup(htmlsafe_dumps(obj, **kwargs))
|
||||
@@ -1,300 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Tagged JSON
|
||||
~~~~~~~~~~~
|
||||
|
||||
A compact representation for lossless serialization of non-standard JSON types.
|
||||
:class:`~flask.sessions.SecureCookieSessionInterface` uses this to serialize
|
||||
the session data, but it may be useful in other places. It can be extended to
|
||||
support other types.
|
||||
|
||||
.. autoclass:: TaggedJSONSerializer
|
||||
:members:
|
||||
|
||||
.. autoclass:: JSONTag
|
||||
:members:
|
||||
|
||||
Let's seen an example that adds support for :class:`~collections.OrderedDict`.
|
||||
Dicts don't have an order in Python or JSON, so to handle this we will dump
|
||||
the items as a list of ``[key, value]`` pairs. Subclass :class:`JSONTag` and
|
||||
give it the new key ``' od'`` to identify the type. The session serializer
|
||||
processes dicts first, so insert the new tag at the front of the order since
|
||||
``OrderedDict`` must be processed before ``dict``. ::
|
||||
|
||||
from flask.json.tag import JSONTag
|
||||
|
||||
class TagOrderedDict(JSONTag):
|
||||
__slots__ = ('serializer',)
|
||||
key = ' od'
|
||||
|
||||
def check(self, value):
|
||||
return isinstance(value, OrderedDict)
|
||||
|
||||
def to_json(self, value):
|
||||
return [[k, self.serializer.tag(v)] for k, v in iteritems(value)]
|
||||
|
||||
def to_python(self, value):
|
||||
return OrderedDict(value)
|
||||
|
||||
app.session_interface.serializer.register(TagOrderedDict, index=0)
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
from base64 import b64decode, b64encode
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from jinja2 import Markup
|
||||
from werkzeug.http import http_date, parse_date
|
||||
|
||||
from flask._compat import iteritems, text_type
|
||||
from flask.json import dumps, loads
|
||||
|
||||
|
||||
class JSONTag(object):
|
||||
"""Base class for defining type tags for :class:`TaggedJSONSerializer`."""
|
||||
|
||||
__slots__ = ('serializer',)
|
||||
|
||||
#: The tag to mark the serialized object with. If ``None``, this tag is
|
||||
#: only used as an intermediate step during tagging.
|
||||
key = None
|
||||
|
||||
def __init__(self, serializer):
|
||||
"""Create a tagger for the given serializer."""
|
||||
self.serializer = serializer
|
||||
|
||||
def check(self, value):
|
||||
"""Check if the given value should be tagged by this tag."""
|
||||
raise NotImplementedError
|
||||
|
||||
def to_json(self, value):
|
||||
"""Convert the Python object to an object that is a valid JSON type.
|
||||
The tag will be added later."""
|
||||
raise NotImplementedError
|
||||
|
||||
def to_python(self, value):
|
||||
"""Convert the JSON representation back to the correct type. The tag
|
||||
will already be removed."""
|
||||
raise NotImplementedError
|
||||
|
||||
def tag(self, value):
|
||||
"""Convert the value to a valid JSON type and add the tag structure
|
||||
around it."""
|
||||
return {self.key: self.to_json(value)}
|
||||
|
||||
|
||||
class TagDict(JSONTag):
|
||||
"""Tag for 1-item dicts whose only key matches a registered tag.
|
||||
|
||||
Internally, the dict key is suffixed with `__`, and the suffix is removed
|
||||
when deserializing.
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
key = ' di'
|
||||
|
||||
def check(self, value):
|
||||
return (
|
||||
isinstance(value, dict)
|
||||
and len(value) == 1
|
||||
and next(iter(value)) in self.serializer.tags
|
||||
)
|
||||
|
||||
def to_json(self, value):
|
||||
key = next(iter(value))
|
||||
return {key + '__': self.serializer.tag(value[key])}
|
||||
|
||||
def to_python(self, value):
|
||||
key = next(iter(value))
|
||||
return {key[:-2]: value[key]}
|
||||
|
||||
|
||||
class PassDict(JSONTag):
|
||||
__slots__ = ()
|
||||
|
||||
def check(self, value):
|
||||
return isinstance(value, dict)
|
||||
|
||||
def to_json(self, value):
|
||||
# JSON objects may only have string keys, so don't bother tagging the
|
||||
# key here.
|
||||
return dict((k, self.serializer.tag(v)) for k, v in iteritems(value))
|
||||
|
||||
tag = to_json
|
||||
|
||||
|
||||
class TagTuple(JSONTag):
|
||||
__slots__ = ()
|
||||
key = ' t'
|
||||
|
||||
def check(self, value):
|
||||
return isinstance(value, tuple)
|
||||
|
||||
def to_json(self, value):
|
||||
return [self.serializer.tag(item) for item in value]
|
||||
|
||||
def to_python(self, value):
|
||||
return tuple(value)
|
||||
|
||||
|
||||
class PassList(JSONTag):
|
||||
__slots__ = ()
|
||||
|
||||
def check(self, value):
|
||||
return isinstance(value, list)
|
||||
|
||||
def to_json(self, value):
|
||||
return [self.serializer.tag(item) for item in value]
|
||||
|
||||
tag = to_json
|
||||
|
||||
|
||||
class TagBytes(JSONTag):
|
||||
__slots__ = ()
|
||||
key = ' b'
|
||||
|
||||
def check(self, value):
|
||||
return isinstance(value, bytes)
|
||||
|
||||
def to_json(self, value):
|
||||
return b64encode(value).decode('ascii')
|
||||
|
||||
def to_python(self, value):
|
||||
return b64decode(value)
|
||||
|
||||
|
||||
class TagMarkup(JSONTag):
|
||||
"""Serialize anything matching the :class:`~flask.Markup` API by
|
||||
having a ``__html__`` method to the result of that method. Always
|
||||
deserializes to an instance of :class:`~flask.Markup`."""
|
||||
|
||||
__slots__ = ()
|
||||
key = ' m'
|
||||
|
||||
def check(self, value):
|
||||
return callable(getattr(value, '__html__', None))
|
||||
|
||||
def to_json(self, value):
|
||||
return text_type(value.__html__())
|
||||
|
||||
def to_python(self, value):
|
||||
return Markup(value)
|
||||
|
||||
|
||||
class TagUUID(JSONTag):
|
||||
__slots__ = ()
|
||||
key = ' u'
|
||||
|
||||
def check(self, value):
|
||||
return isinstance(value, UUID)
|
||||
|
||||
def to_json(self, value):
|
||||
return value.hex
|
||||
|
||||
def to_python(self, value):
|
||||
return UUID(value)
|
||||
|
||||
|
||||
class TagDateTime(JSONTag):
|
||||
__slots__ = ()
|
||||
key = ' d'
|
||||
|
||||
def check(self, value):
|
||||
return isinstance(value, datetime)
|
||||
|
||||
def to_json(self, value):
|
||||
return http_date(value)
|
||||
|
||||
def to_python(self, value):
|
||||
return parse_date(value)
|
||||
|
||||
|
||||
class TaggedJSONSerializer(object):
|
||||
"""Serializer that uses a tag system to compactly represent objects that
|
||||
are not JSON types. Passed as the intermediate serializer to
|
||||
:class:`itsdangerous.Serializer`.
|
||||
|
||||
The following extra types are supported:
|
||||
|
||||
* :class:`dict`
|
||||
* :class:`tuple`
|
||||
* :class:`bytes`
|
||||
* :class:`~flask.Markup`
|
||||
* :class:`~uuid.UUID`
|
||||
* :class:`~datetime.datetime`
|
||||
"""
|
||||
|
||||
__slots__ = ('tags', 'order')
|
||||
|
||||
#: Tag classes to bind when creating the serializer. Other tags can be
|
||||
#: added later using :meth:`~register`.
|
||||
default_tags = [
|
||||
TagDict, PassDict, TagTuple, PassList, TagBytes, TagMarkup, TagUUID,
|
||||
TagDateTime,
|
||||
]
|
||||
|
||||
def __init__(self):
|
||||
self.tags = {}
|
||||
self.order = []
|
||||
|
||||
for cls in self.default_tags:
|
||||
self.register(cls)
|
||||
|
||||
def register(self, tag_class, force=False, index=None):
|
||||
"""Register a new tag with this serializer.
|
||||
|
||||
:param tag_class: tag class to register. Will be instantiated with this
|
||||
serializer instance.
|
||||
:param force: overwrite an existing tag. If false (default), a
|
||||
:exc:`KeyError` is raised.
|
||||
:param index: index to insert the new tag in the tag order. Useful when
|
||||
the new tag is a special case of an existing tag. If ``None``
|
||||
(default), the tag is appended to the end of the order.
|
||||
|
||||
:raise KeyError: if the tag key is already registered and ``force`` is
|
||||
not true.
|
||||
"""
|
||||
tag = tag_class(self)
|
||||
key = tag.key
|
||||
|
||||
if key is not None:
|
||||
if not force and key in self.tags:
|
||||
raise KeyError("Tag '{0}' is already registered.".format(key))
|
||||
|
||||
self.tags[key] = tag
|
||||
|
||||
if index is None:
|
||||
self.order.append(tag)
|
||||
else:
|
||||
self.order.insert(index, tag)
|
||||
|
||||
def tag(self, value):
|
||||
"""Convert a value to a tagged representation if necessary."""
|
||||
for tag in self.order:
|
||||
if tag.check(value):
|
||||
return tag.tag(value)
|
||||
|
||||
return value
|
||||
|
||||
def untag(self, value):
|
||||
"""Convert a tagged representation back to the original type."""
|
||||
if len(value) != 1:
|
||||
return value
|
||||
|
||||
key = next(iter(value))
|
||||
|
||||
if key not in self.tags:
|
||||
return value
|
||||
|
||||
return self.tags[key].to_python(value[key])
|
||||
|
||||
def dumps(self, value):
|
||||
"""Tag the value and dump it to a compact JSON string."""
|
||||
return dumps(self.tag(value), separators=(',', ':'))
|
||||
|
||||
def loads(self, value):
|
||||
"""Load data from a JSON string and deserialized any tagged objects."""
|
||||
return loads(value, object_hook=self.untag)
|
||||
@@ -1,78 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.logging
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from werkzeug.local import LocalProxy
|
||||
|
||||
from .globals import request
|
||||
|
||||
|
||||
@LocalProxy
|
||||
def wsgi_errors_stream():
|
||||
"""Find the most appropriate error stream for the application. If a request
|
||||
is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``.
|
||||
|
||||
If you configure your own :class:`logging.StreamHandler`, you may want to
|
||||
use this for the stream. If you are using file or dict configuration and
|
||||
can't import this directly, you can refer to it as
|
||||
``ext://flask.logging.wsgi_errors_stream``.
|
||||
"""
|
||||
return request.environ['wsgi.errors'] if request else sys.stderr
|
||||
|
||||
|
||||
def has_level_handler(logger):
|
||||
"""Check if there is a handler in the logging chain that will handle the
|
||||
given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`.
|
||||
"""
|
||||
level = logger.getEffectiveLevel()
|
||||
current = logger
|
||||
|
||||
while current:
|
||||
if any(handler.level <= level for handler in current.handlers):
|
||||
return True
|
||||
|
||||
if not current.propagate:
|
||||
break
|
||||
|
||||
current = current.parent
|
||||
|
||||
return False
|
||||
|
||||
|
||||
#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format
|
||||
#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``.
|
||||
default_handler = logging.StreamHandler(wsgi_errors_stream)
|
||||
default_handler.setFormatter(logging.Formatter(
|
||||
'[%(asctime)s] %(levelname)s in %(module)s: %(message)s'
|
||||
))
|
||||
|
||||
|
||||
def create_logger(app):
|
||||
"""Get the ``'flask.app'`` logger and configure it if needed.
|
||||
|
||||
When :attr:`~flask.Flask.debug` is enabled, set the logger level to
|
||||
:data:`logging.DEBUG` if it is not set.
|
||||
|
||||
If there is no handler for the logger's effective level, add a
|
||||
:class:`~logging.StreamHandler` for
|
||||
:func:`~flask.logging.wsgi_errors_stream` with a basic format.
|
||||
"""
|
||||
logger = logging.getLogger('flask.app')
|
||||
|
||||
if app.debug and logger.level == logging.NOTSET:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
if not has_level_handler(logger):
|
||||
logger.addHandler(default_handler)
|
||||
|
||||
return logger
|
||||
@@ -1,385 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.sessions
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
Implements cookie based sessions based on itsdangerous.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import warnings
|
||||
from datetime import datetime
|
||||
|
||||
from itsdangerous import BadSignature, URLSafeTimedSerializer
|
||||
from werkzeug.datastructures import CallbackDict
|
||||
|
||||
from flask._compat import collections_abc
|
||||
from flask.helpers import is_ip, total_seconds
|
||||
from flask.json.tag import TaggedJSONSerializer
|
||||
|
||||
|
||||
class SessionMixin(collections_abc.MutableMapping):
|
||||
"""Expands a basic dictionary with session attributes."""
|
||||
|
||||
@property
|
||||
def permanent(self):
|
||||
"""This reflects the ``'_permanent'`` key in the dict."""
|
||||
return self.get('_permanent', False)
|
||||
|
||||
@permanent.setter
|
||||
def permanent(self, value):
|
||||
self['_permanent'] = bool(value)
|
||||
|
||||
#: Some implementations can detect whether a session is newly
|
||||
#: created, but that is not guaranteed. Use with caution. The mixin
|
||||
# default is hard-coded ``False``.
|
||||
new = False
|
||||
|
||||
#: Some implementations can detect changes to the session and set
|
||||
#: this when that happens. The mixin default is hard coded to
|
||||
#: ``True``.
|
||||
modified = True
|
||||
|
||||
#: Some implementations can detect when session data is read or
|
||||
#: written and set this when that happens. The mixin default is hard
|
||||
#: coded to ``True``.
|
||||
accessed = True
|
||||
|
||||
|
||||
class SecureCookieSession(CallbackDict, SessionMixin):
|
||||
"""Base class for sessions based on signed cookies.
|
||||
|
||||
This session backend will set the :attr:`modified` and
|
||||
:attr:`accessed` attributes. It cannot reliably track whether a
|
||||
session is new (vs. empty), so :attr:`new` remains hard coded to
|
||||
``False``.
|
||||
"""
|
||||
|
||||
#: When data is changed, this is set to ``True``. Only the session
|
||||
#: dictionary itself is tracked; if the session contains mutable
|
||||
#: data (for example a nested dict) then this must be set to
|
||||
#: ``True`` manually when modifying that data. The session cookie
|
||||
#: will only be written to the response if this is ``True``.
|
||||
modified = False
|
||||
|
||||
#: When data is read or written, this is set to ``True``. Used by
|
||||
# :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie``
|
||||
#: header, which allows caching proxies to cache different pages for
|
||||
#: different users.
|
||||
accessed = False
|
||||
|
||||
def __init__(self, initial=None):
|
||||
def on_update(self):
|
||||
self.modified = True
|
||||
self.accessed = True
|
||||
|
||||
super(SecureCookieSession, self).__init__(initial, on_update)
|
||||
|
||||
def __getitem__(self, key):
|
||||
self.accessed = True
|
||||
return super(SecureCookieSession, self).__getitem__(key)
|
||||
|
||||
def get(self, key, default=None):
|
||||
self.accessed = True
|
||||
return super(SecureCookieSession, self).get(key, default)
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
self.accessed = True
|
||||
return super(SecureCookieSession, self).setdefault(key, default)
|
||||
|
||||
|
||||
class NullSession(SecureCookieSession):
|
||||
"""Class used to generate nicer error messages if sessions are not
|
||||
available. Will still allow read-only access to the empty session
|
||||
but fail on setting.
|
||||
"""
|
||||
|
||||
def _fail(self, *args, **kwargs):
|
||||
raise RuntimeError('The session is unavailable because no secret '
|
||||
'key was set. Set the secret_key on the '
|
||||
'application to something unique and secret.')
|
||||
__setitem__ = __delitem__ = clear = pop = popitem = \
|
||||
update = setdefault = _fail
|
||||
del _fail
|
||||
|
||||
|
||||
class SessionInterface(object):
|
||||
"""The basic interface you have to implement in order to replace the
|
||||
default session interface which uses werkzeug's securecookie
|
||||
implementation. The only methods you have to implement are
|
||||
:meth:`open_session` and :meth:`save_session`, the others have
|
||||
useful defaults which you don't need to change.
|
||||
|
||||
The session object returned by the :meth:`open_session` method has to
|
||||
provide a dictionary like interface plus the properties and methods
|
||||
from the :class:`SessionMixin`. We recommend just subclassing a dict
|
||||
and adding that mixin::
|
||||
|
||||
class Session(dict, SessionMixin):
|
||||
pass
|
||||
|
||||
If :meth:`open_session` returns ``None`` Flask will call into
|
||||
:meth:`make_null_session` to create a session that acts as replacement
|
||||
if the session support cannot work because some requirement is not
|
||||
fulfilled. The default :class:`NullSession` class that is created
|
||||
will complain that the secret key was not set.
|
||||
|
||||
To replace the session interface on an application all you have to do
|
||||
is to assign :attr:`flask.Flask.session_interface`::
|
||||
|
||||
app = Flask(__name__)
|
||||
app.session_interface = MySessionInterface()
|
||||
|
||||
.. versionadded:: 0.8
|
||||
"""
|
||||
|
||||
#: :meth:`make_null_session` will look here for the class that should
|
||||
#: be created when a null session is requested. Likewise the
|
||||
#: :meth:`is_null_session` method will perform a typecheck against
|
||||
#: this type.
|
||||
null_session_class = NullSession
|
||||
|
||||
#: A flag that indicates if the session interface is pickle based.
|
||||
#: This can be used by Flask extensions to make a decision in regards
|
||||
#: to how to deal with the session object.
|
||||
#:
|
||||
#: .. versionadded:: 0.10
|
||||
pickle_based = False
|
||||
|
||||
def make_null_session(self, app):
|
||||
"""Creates a null session which acts as a replacement object if the
|
||||
real session support could not be loaded due to a configuration
|
||||
error. This mainly aids the user experience because the job of the
|
||||
null session is to still support lookup without complaining but
|
||||
modifications are answered with a helpful error message of what
|
||||
failed.
|
||||
|
||||
This creates an instance of :attr:`null_session_class` by default.
|
||||
"""
|
||||
return self.null_session_class()
|
||||
|
||||
def is_null_session(self, obj):
|
||||
"""Checks if a given object is a null session. Null sessions are
|
||||
not asked to be saved.
|
||||
|
||||
This checks if the object is an instance of :attr:`null_session_class`
|
||||
by default.
|
||||
"""
|
||||
return isinstance(obj, self.null_session_class)
|
||||
|
||||
def get_cookie_domain(self, app):
|
||||
"""Returns the domain that should be set for the session cookie.
|
||||
|
||||
Uses ``SESSION_COOKIE_DOMAIN`` if it is configured, otherwise
|
||||
falls back to detecting the domain based on ``SERVER_NAME``.
|
||||
|
||||
Once detected (or if not set at all), ``SESSION_COOKIE_DOMAIN`` is
|
||||
updated to avoid re-running the logic.
|
||||
"""
|
||||
|
||||
rv = app.config['SESSION_COOKIE_DOMAIN']
|
||||
|
||||
# set explicitly, or cached from SERVER_NAME detection
|
||||
# if False, return None
|
||||
if rv is not None:
|
||||
return rv if rv else None
|
||||
|
||||
rv = app.config['SERVER_NAME']
|
||||
|
||||
# server name not set, cache False to return none next time
|
||||
if not rv:
|
||||
app.config['SESSION_COOKIE_DOMAIN'] = False
|
||||
return None
|
||||
|
||||
# chop off the port which is usually not supported by browsers
|
||||
# remove any leading '.' since we'll add that later
|
||||
rv = rv.rsplit(':', 1)[0].lstrip('.')
|
||||
|
||||
if '.' not in rv:
|
||||
# Chrome doesn't allow names without a '.'
|
||||
# this should only come up with localhost
|
||||
# hack around this by not setting the name, and show a warning
|
||||
warnings.warn(
|
||||
'"{rv}" is not a valid cookie domain, it must contain a ".".'
|
||||
' Add an entry to your hosts file, for example'
|
||||
' "{rv}.localdomain", and use that instead.'.format(rv=rv)
|
||||
)
|
||||
app.config['SESSION_COOKIE_DOMAIN'] = False
|
||||
return None
|
||||
|
||||
ip = is_ip(rv)
|
||||
|
||||
if ip:
|
||||
warnings.warn(
|
||||
'The session cookie domain is an IP address. This may not work'
|
||||
' as intended in some browsers. Add an entry to your hosts'
|
||||
' file, for example "localhost.localdomain", and use that'
|
||||
' instead.'
|
||||
)
|
||||
|
||||
# if this is not an ip and app is mounted at the root, allow subdomain
|
||||
# matching by adding a '.' prefix
|
||||
if self.get_cookie_path(app) == '/' and not ip:
|
||||
rv = '.' + rv
|
||||
|
||||
app.config['SESSION_COOKIE_DOMAIN'] = rv
|
||||
return rv
|
||||
|
||||
def get_cookie_path(self, app):
|
||||
"""Returns the path for which the cookie should be valid. The
|
||||
default implementation uses the value from the ``SESSION_COOKIE_PATH``
|
||||
config var if it's set, and falls back to ``APPLICATION_ROOT`` or
|
||||
uses ``/`` if it's ``None``.
|
||||
"""
|
||||
return app.config['SESSION_COOKIE_PATH'] \
|
||||
or app.config['APPLICATION_ROOT']
|
||||
|
||||
def get_cookie_httponly(self, app):
|
||||
"""Returns True if the session cookie should be httponly. This
|
||||
currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
|
||||
config var.
|
||||
"""
|
||||
return app.config['SESSION_COOKIE_HTTPONLY']
|
||||
|
||||
def get_cookie_secure(self, app):
|
||||
"""Returns True if the cookie should be secure. This currently
|
||||
just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
|
||||
"""
|
||||
return app.config['SESSION_COOKIE_SECURE']
|
||||
|
||||
def get_cookie_samesite(self, app):
|
||||
"""Return ``'Strict'`` or ``'Lax'`` if the cookie should use the
|
||||
``SameSite`` attribute. This currently just returns the value of
|
||||
the :data:`SESSION_COOKIE_SAMESITE` setting.
|
||||
"""
|
||||
return app.config['SESSION_COOKIE_SAMESITE']
|
||||
|
||||
def get_expiration_time(self, app, session):
|
||||
"""A helper method that returns an expiration date for the session
|
||||
or ``None`` if the session is linked to the browser session. The
|
||||
default implementation returns now + the permanent session
|
||||
lifetime configured on the application.
|
||||
"""
|
||||
if session.permanent:
|
||||
return datetime.utcnow() + app.permanent_session_lifetime
|
||||
|
||||
def should_set_cookie(self, app, session):
|
||||
"""Used by session backends to determine if a ``Set-Cookie`` header
|
||||
should be set for this session cookie for this response. If the session
|
||||
has been modified, the cookie is set. If the session is permanent and
|
||||
the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is
|
||||
always set.
|
||||
|
||||
This check is usually skipped if the session was deleted.
|
||||
|
||||
.. versionadded:: 0.11
|
||||
"""
|
||||
|
||||
return session.modified or (
|
||||
session.permanent and app.config['SESSION_REFRESH_EACH_REQUEST']
|
||||
)
|
||||
|
||||
def open_session(self, app, request):
|
||||
"""This method has to be implemented and must either return ``None``
|
||||
in case the loading failed because of a configuration error or an
|
||||
instance of a session object which implements a dictionary like
|
||||
interface + the methods and attributes on :class:`SessionMixin`.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def save_session(self, app, session, response):
|
||||
"""This is called for actual sessions returned by :meth:`open_session`
|
||||
at the end of the request. This is still called during a request
|
||||
context so if you absolutely need access to the request you can do
|
||||
that.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
session_json_serializer = TaggedJSONSerializer()
|
||||
|
||||
|
||||
class SecureCookieSessionInterface(SessionInterface):
|
||||
"""The default session interface that stores sessions in signed cookies
|
||||
through the :mod:`itsdangerous` module.
|
||||
"""
|
||||
#: the salt that should be applied on top of the secret key for the
|
||||
#: signing of cookie based sessions.
|
||||
salt = 'cookie-session'
|
||||
#: the hash function to use for the signature. The default is sha1
|
||||
digest_method = staticmethod(hashlib.sha1)
|
||||
#: the name of the itsdangerous supported key derivation. The default
|
||||
#: is hmac.
|
||||
key_derivation = 'hmac'
|
||||
#: A python serializer for the payload. The default is a compact
|
||||
#: JSON derived serializer with support for some extra Python types
|
||||
#: such as datetime objects or tuples.
|
||||
serializer = session_json_serializer
|
||||
session_class = SecureCookieSession
|
||||
|
||||
def get_signing_serializer(self, app):
|
||||
if not app.secret_key:
|
||||
return None
|
||||
signer_kwargs = dict(
|
||||
key_derivation=self.key_derivation,
|
||||
digest_method=self.digest_method
|
||||
)
|
||||
return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
|
||||
serializer=self.serializer,
|
||||
signer_kwargs=signer_kwargs)
|
||||
|
||||
def open_session(self, app, request):
|
||||
s = self.get_signing_serializer(app)
|
||||
if s is None:
|
||||
return None
|
||||
val = request.cookies.get(app.session_cookie_name)
|
||||
if not val:
|
||||
return self.session_class()
|
||||
max_age = total_seconds(app.permanent_session_lifetime)
|
||||
try:
|
||||
data = s.loads(val, max_age=max_age)
|
||||
return self.session_class(data)
|
||||
except BadSignature:
|
||||
return self.session_class()
|
||||
|
||||
def save_session(self, app, session, response):
|
||||
domain = self.get_cookie_domain(app)
|
||||
path = self.get_cookie_path(app)
|
||||
|
||||
# If the session is modified to be empty, remove the cookie.
|
||||
# If the session is empty, return without setting the cookie.
|
||||
if not session:
|
||||
if session.modified:
|
||||
response.delete_cookie(
|
||||
app.session_cookie_name,
|
||||
domain=domain,
|
||||
path=path
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
# Add a "Vary: Cookie" header if the session was accessed at all.
|
||||
if session.accessed:
|
||||
response.vary.add('Cookie')
|
||||
|
||||
if not self.should_set_cookie(app, session):
|
||||
return
|
||||
|
||||
httponly = self.get_cookie_httponly(app)
|
||||
secure = self.get_cookie_secure(app)
|
||||
samesite = self.get_cookie_samesite(app)
|
||||
expires = self.get_expiration_time(app, session)
|
||||
val = self.get_signing_serializer(app).dumps(dict(session))
|
||||
response.set_cookie(
|
||||
app.session_cookie_name,
|
||||
val,
|
||||
expires=expires,
|
||||
httponly=httponly,
|
||||
domain=domain,
|
||||
path=path,
|
||||
secure=secure,
|
||||
samesite=samesite
|
||||
)
|
||||
@@ -1,57 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
flask.signals
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
Implements signals based on blinker if available, otherwise
|
||||
falls silently back to a noop.
|
||||
|
||||
:copyright: © 2010 by the Pallets team.
|
||||
:license: BSD, see LICENSE for more details.
|
||||
"""
|
||||
|
||||
signals_available = False
|
||||
try:
|
||||
from blinker import Namespace
|
||||
signals_available = True
|
||||
except ImportError:
|
||||
class Namespace(object):
|
||||
def signal(self, name, doc=None):
|
||||
return _FakeSignal(name, doc)
|
||||
|
||||
class _FakeSignal(object):
|
||||
"""If blinker is unavailable, create a fake class with the same
|
||||
interface that allows sending of signals but will fail with an
|
||||
error on anything else. Instead of doing anything on send, it
|
||||
will just ignore the arguments and do nothing instead.
|
||||
"""
|
||||
|
||||
def __init__(self, name, doc=None):
|
||||
self.name = name
|
||||
self.__doc__ = doc
|
||||
def _fail(self, *args, **kwargs):
|
||||
raise RuntimeError('signalling support is unavailable '
|
||||
'because the blinker library is '
|
||||
'not installed.')
|
||||
send = lambda *a, **kw: None
|
||||
connect = disconnect = has_receivers_for = receivers_for = \
|
||||
temporarily_connected_to = connected_to = _fail
|
||||
del _fail
|
||||
|
||||
# The namespace for code signals. If you are not Flask code, do
|
||||
# not put signals in here. Create your own namespace instead.
|
||||
_signals = Namespace()
|
||||
|
||||
|
||||
# Core signals. For usage examples grep the source code or consult
|
||||
# the API documentation in docs/api.rst as well as docs/signals.rst
|
||||
template_rendered = _signals.signal('template-rendered')
|
||||
before_render_template = _signals.signal('before-render-template')
|
||||
request_started = _signals.signal('request-started')
|
||||
request_finished = _signals.signal('request-finished')
|
||||
request_tearing_down = _signals.signal('request-tearing-down')
|
||||
got_request_exception = _signals.signal('got-request-exception')
|
||||
appcontext_tearing_down = _signals.signal('appcontext-tearing-down')
|
||||
appcontext_pushed = _signals.signal('appcontext-pushed')
|
||||
appcontext_popped = _signals.signal('appcontext-popped')
|
||||
message_flashed = _signals.signal('message-flashed')
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user