Permalink
Please sign in to comment.
Browse files
Initial copy of the hg tip - no history yet, that will come later
- Loading branch information...
Showing
with
11,538 additions
and 2 deletions.
- +508 −0 CHANGES
- +13 −0 COPYING
- +202 −0 LICENSE
- +9 −0 MANIFEST.in
- +13 −0 NOTICE
- +206 −2 README.md
- +2,026 −0 doc/twitter.html
- +71 −0 examples/shorten_url.py
- +141 −0 examples/tweet.py
- +69 −0 examples/twitter-to-xhtml.py
- +91 −0 get_access_token.py
- +50 −0 python-twitter.spec
- +73 −0 setup.py
- +316 −0 simplejson/__init__.py
- +2,265 −0 simplejson/_speedups.c
- +348 −0 simplejson/decoder.py
- +436 −0 simplejson/encoder.py
- +65 −0 simplejson/scanner.py
- +35 −0 simplejson/tool.py
- +1 −0 testdata/direct_message-destroy.json
- +1 −0 testdata/direct_messages-new.json
- +1 −0 testdata/direct_messages.json
- +1 −0 testdata/featured.json
- +1 −0 testdata/followers.json
- +1 −0 testdata/friends.json
- +1 −0 testdata/friends_timeline-kesuke.json
- +1 −0 testdata/friendship-create.json
- +1 −0 testdata/friendship-destroy.json
- +1 −0 testdata/public_timeline.json
- +1 −0 testdata/public_timeline_error.json
- +1 −0 testdata/replies.json
- +1 −0 testdata/show-89512102.json
- +1 −0 testdata/show-dewitt.json
- +1 −0 testdata/status-destroy.json
- +1 −0 testdata/update.json
- +1 −0 testdata/user_timeline-kesuke.json
- +1 −0 testdata/user_timeline.json
- +3,969 −0 twitter.py
- +614 −0 twitter_test.py
508
CHANGES
| @@ -0,0 +1,508 @@ | ||
| +2011-12-03 | ||
| + | ||
| + https://code.google.com/p/python-twitter/source/detail?r=263fe2a0db8be23347e92b81d6ab3c33b4ef292f | ||
| + Comment by qfuxiang to the above changeset | ||
| + The base url was wrong for the Followers API calls | ||
| + | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=213 | ||
| + Add include_entities parameter to GetStatus() | ||
| + Patch by gaelenh | ||
| + | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=214 | ||
| + Change PostUpdate() so that it takes the shortened link into | ||
| + account. Small tweak to the patch provided to make the | ||
| + shortened-link length set by an API value instead of a constant. | ||
| + Patch by ceesjan.ytec | ||
| + | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=216 | ||
| + AttributeError handles the fact that win* doesn't implement | ||
| + os.getlogin() | ||
| + Patch by yaleman | ||
| + | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=217 | ||
| + As described at https://dev.twitter.com/docs/api/1/get/trends, | ||
| + GET trends (corresponding to Api.GetTrendsCurrent) is now | ||
| + deprecated in favor of GET trends/:woeid. GET trends also now | ||
| + requires authentication, while trends/:woeid doesn't. | ||
| + Patch and excellent description by jessica.mckellar | ||
| + | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=218 | ||
| + Currently, two Trends containing the same information | ||
| + (name, query, and timestamp) aren't considered equal because | ||
| + __eq__ isn't overridden, like it is for Status, User, and the | ||
| + other Twitter objects. | ||
| + Patch and excellent description by jessica.mckellar | ||
| + | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=220 | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=211 | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=206 | ||
| + All variations on a theme - basically Twitter is returning | ||
| + something different for an error payload. Changed code to | ||
| + check for both 'error' and 'errors'. | ||
| + | ||
| +2011-05-08 | ||
| + | ||
| + https://code.google.com/p/python-twitter/issues/detail?id=184 | ||
| + A comment in this issue made me realize that the parameter sanity | ||
| + check for max_id was missing in GetMentions() - added | ||
| + | ||
| + First pass at working in some of the cursor support that has been | ||
| + in the Twitter API but we haven't made full use of - still working | ||
| + out the small issues. | ||
| + | ||
| +2011-04-16 | ||
| + | ||
| + bumped version to 0.8.3 | ||
| + released 0.8.2 to PyPI | ||
| + bumped version to 0.8.2 | ||
| + | ||
| + Issue 193 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=193 | ||
| + Missing retweet_count field on Status object | ||
| + Patch (with minor tweaks) by alissonp | ||
| + | ||
| + Issue 181 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=181 | ||
| + Add oauth2 to install_requires parameter list and also updated | ||
| + README to note that the oauth2 lib can be found in two locations | ||
| + | ||
| + Issue 182, Issue 137, Issue 93, Issue 190 | ||
| + language value missing from User object | ||
| + Added 'lang' item and also some others that were needed: | ||
| + verified, notifications, contributors_enabled and listed_count | ||
| + patches by wreinerat, apetresc, jpwigan and ghills | ||
| + | ||
| +2011-02-26 | ||
| + | ||
| + Issue 166 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=166 | ||
| + Added a basic, but sadly needed, check when parsing the json | ||
| + returned by Twitter as Twitter has a habit of returning the | ||
| + failwhale HTML page for a json api call :( | ||
| + Patch (with minor tweaks) by adam.aviv | ||
| + | ||
| + Issue 187 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=187 | ||
| + Applied patch by edward.hades to fix issue where MaximumHitFrequency | ||
| + returns 0 when requests are maxed out | ||
| + | ||
| + Issue 184 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=184 | ||
| + Applied patch by jmstaley to put into the GetUserTimeline API | ||
| + parameter list the max_id value (it was being completely ignored) | ||
| + | ||
| +2011-02-20 | ||
| + | ||
| + Added retweeted to Status class | ||
| + Fixed Status class to return Hashtags list in AsDict() call | ||
| + | ||
| + Issue 185 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=185 | ||
| + Added retweeted_status to Status class - patch by edward.hades | ||
| + | ||
| + Issue 183 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=183 | ||
| + Removed errant print statement - reported by ProgVal | ||
| + | ||
| +2010-12-21 | ||
| + | ||
| + Setting version to 0.8.1 | ||
| + | ||
| + Issue 179 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=179 | ||
| + Added MANIFEST.in to give setup.py sdist some clues as to what | ||
| + files to include in the tarball | ||
| + | ||
| +2010-11-14 | ||
| + | ||
| + Setting version to 0.8 for a bit as having a branch for this is | ||
| + really overkill, i'll just take DeWitt's advice and tag it when | ||
| + the release is out the door | ||
| + | ||
| + Issue 175 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=175 | ||
| + Added geo_enabled to User class - basic parts of patch provided | ||
| + by adam.aviv with other bits added by me to allow it to pass tests | ||
| + | ||
| + Issue 174 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=174 | ||
| + Added parts of adam.aviv's patch - the bits that add new field items | ||
| + to the Status class. | ||
| + | ||
| + Issue 159 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=159 | ||
| + Added patch from adam.aviv to make the term parameter for GetSearch() | ||
| + optional if geocode parameter is supplied | ||
| + | ||
| +2010-11-03 | ||
| + | ||
| + Ran pydoc to generate docs | ||
| + | ||
| +2010-10-16 | ||
| + | ||
| + Fixed bad date in previous CHANGES entry | ||
| + | ||
| + Fixed source of the python-oauth2 library we use: from brosner | ||
| + to simplegeo | ||
| + | ||
| + I made a pass thru the docstrings and updated many to be the | ||
| + text from the current Twitter API docs. Also fixed numerous | ||
| + whitespace issues and did a s/[optional]/[Optional]/ change. | ||
| + | ||
| + Imported work by Colin Howe that he did to get the tests working. | ||
| + http://code.google.com/r/colinthehowe-python-twitter-tests/source/detail?r=6cff589aca9c955df8354fe4d8e302ec4a2eb31c | ||
| + http://code.google.com/r/colinthehowe-python-twitter-tests/source/detail?r=cab8e32d7a9c34c66d2e75eebc7a1ba6e1eac8ce | ||
| + http://code.google.com/r/colinthehowe-python-twitter-tests/source/detail?r=b434d9e5dd7b989ae24483477e3f00b1ad362cc5 | ||
| + | ||
| + Issue 169 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=169 | ||
| + Patch by yaemog which adds missing Trends support. | ||
| + | ||
| + Issue 168 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=168 | ||
| + Only cache successful results as suggested by yaemog. | ||
| + | ||
| + Issue 111 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=111 | ||
| + Added a new GetUserRetweets() call as suggested by yash888 | ||
| + Patch given was adjusted to reflect the current code requirements. | ||
| + | ||
| + Issue 110 | ||
| + Added a VerifyCredentials() sample call to the README example | ||
| + | ||
| + Issue 105 | ||
| + Added support for the page parameter to GetFriendsTimeline() | ||
| + as requested by jauderho. | ||
| + I also updated GetFriendsTimeline() to follow the current | ||
| + Twitter API documentation | ||
| + | ||
| + Somewhere in the patch frenzy of today an extra GetStatus() | ||
| + def was introduced!?! Luckily it was caught by the tests. | ||
| + wooo tests! \m/ | ||
| + | ||
| + Setting version to 0.8 | ||
| + | ||
| + r0.8 branch created and trunk set to version 0.9-devel | ||
| + | ||
| +2010-09-26 | ||
| + | ||
| + Issue 150 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=150 | ||
| + Patch by blhobbes which removes a double quoting issue that | ||
| + was happening for GetSearch() | ||
| + Reported by huubhuubbarbatruuk | ||
| + | ||
| + Issue 160 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=160 | ||
| + Patch by yaemog which adds support for include_rts and | ||
| + include_entities support to GetUserTimeline and GetPublicTimeline | ||
| + Small tweaks post-patch | ||
| + | ||
| + Applied docstring tweak suggested by dclinton in revision comment | ||
| + http://code.google.com/p/python-twitter/source/detail?r=a858412e38f7e3856fef924291ef039284d3a6e1 | ||
| + Thanks for the catch! | ||
| + | ||
| + Issue 164 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=164 | ||
| + Patch by yaemog which adds GetRetweets support. | ||
| + Small tweaks and two typo fixes post-patch. | ||
| + | ||
| + Issue 165 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=165 | ||
| + Patch by yaemog which adds GetStatus support. | ||
| + Small tweaks post-patch | ||
| + | ||
| + Issue 163 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=163 | ||
| + Patch by yaemog which adds users/lookup support. | ||
| + Small tweaks to docstring only post-patch. | ||
| + | ||
| + Changed username/password parameter to Api class to be | ||
| + consumer_key/consumer_secret to better match the new | ||
| + oAuth only world that Twitter has demanded. | ||
| + | ||
| + Added debugHTTP to the parameter list to Api class to | ||
| + control if/when the urllib debug output is displayed. | ||
| + | ||
| +2010-08-25 | ||
| + | ||
| + First pass at adding list support. | ||
| + Added a new List class and also added to the Api class | ||
| + new methods for working with lists: | ||
| + | ||
| + CreateList(self, user, name, mode=None, description=None) | ||
| + DestroyList(self, user, id) | ||
| + CreateSubscription(self, owner, list) | ||
| + DestroySubscription(self, owner, list) | ||
| + GetSubscriptions(self, user, cursor=-1) | ||
| + GetLists(self, user, cursor=-1) | ||
| + | ||
| +2010-08-24 | ||
| + | ||
| + Fixed introduced bug in the Destroy* and Create* API calls | ||
| + where any of the routines were passing in an empty dict for | ||
| + POST data. Before the oAuth change that was enough to tell | ||
| + _FetchUrl() to use POST instead of GET but now a non-empty | ||
| + dict is required. | ||
| + | ||
| + Issue 144 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=144 | ||
| + GetFriends() where it was failing with a 'unicode object has | ||
| + no attribute get'. This was caused when Twitter changed how | ||
| + they return the JSON data. It used to be a straight list but | ||
| + now there are some elements *and* then the list. | ||
| + | ||
| +2010-08-18 | ||
| + | ||
| + Applied the json/simplejson part of the patch found | ||
| + in Issue 64 (http://code.google.com/p/python-twitter/issues/detail?id=64) | ||
| + Patch provided by Thomas Bohmbach | ||
| + | ||
| + Applied patch provided by liris.pp in Issue 147 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=147 | ||
| + Ensures that during a PostStatus we count the length using a unicode aware | ||
| + len() routine. Tweaked patch slightly to take into account that the | ||
| + twitter.Api() instance may have been setup with None for input_encoding. | ||
| + | ||
| +2010-08-17 | ||
| + | ||
| + Fixed error in the POST path for _FetchUrl() where by | ||
| + I show to the world that yes, I do make last minute | ||
| + changes and completely forget to test them :( | ||
| + Thanks to Peter Sanchez for finding and pointing to | ||
| + working code that showed the fix | ||
| + | ||
| +2010-08-15 | ||
| + | ||
| + Added more help text (I hope it helps) to the README | ||
| + and also to get_access_token.py. | ||
| + | ||
| + Added doctext notes to twitter.Api() parameter list | ||
| + to explain more about oAuth. | ||
| + | ||
| + Added import exception handling for parse_qs() and | ||
| + parse_qsl() as it seems those functions moved between | ||
| + 2.5 and 2.6 so the oAuth update broke the lib under | ||
| + python2.5. Thanks to Rich for the bug find (sorry | ||
| + it had to be found the hard way!) | ||
| + | ||
| + from changeset 184:60315000989c by DeWitt | ||
| + Update the generated twitter.py docs to match the trunk | ||
| + | ||
| +2010-08-14 | ||
| + | ||
| + Fixed silly typo in _FetchUrl() when doing a POST | ||
| + Thanks to Peter Sanchez for the find and fix! | ||
| + | ||
| + Added some really basic text to the get_access_token.py | ||
| + startup output that explains why, for now, you need to | ||
| + visit Twitter and get an Application key/secret to use | ||
| + this library | ||
| + | ||
| +2010-08-12 | ||
| + | ||
| + Updated code to use python-oauth2 library for authentication. | ||
| + Twitter has set a deadline, 2010-08-16 as of this change, for | ||
| + the switch from Basic to oAuth. | ||
| + | ||
| + The oAuth integration was inspired by the work done by | ||
| + Hameedullah Khan and others. | ||
| + | ||
| + The change to using python-oauth2 library was done purely to | ||
| + align python-twitter with an oauth library that was maintained | ||
| + and had tests to try and minimize grief moving forward. | ||
| + | ||
| + Slipped into GetFriendsTimeline() a new parameter, retweets, to | ||
| + allow the call to pull from the "friends_timeline" or the | ||
| + "home_timeline". | ||
| + | ||
| + Fixed some typos and white-space issues and also updated the | ||
| + README to point to the new Twitter Dev site. | ||
| + | ||
| +2010-08-02 | ||
| + | ||
| + Updated copyright information. | ||
| + | ||
| +2010-06-13 | ||
| + | ||
| + Applied changeset from nicdumz repo nicdumz-cleaner-python-twitter | ||
| + r=07df3feee06c8d0f9961596e5fceae9e74493d25 | ||
| + datetime is required for MaximumHitFrequency | ||
| + | ||
| + Applied changeset from nicdumz repo nicdumz-cleaner-python-twitter | ||
| + r=dd669dff32d101856ed6e50fe8bd938640b04d77 | ||
| + update source URLs in README | ||
| + | ||
| + Applied changeset from nicdumz repo nicdumz-cleaner-python-twitter | ||
| + r=8f0796d7fdcea17f4162aeb22d3c36cb603088c7 | ||
| + adjust tests to reflect http://twitter.com -> https://twitter.com change | ||
| + | ||
| + Applied changeset from nicdumz repo nicdumz-cleaner-python-twitter | ||
| + r=3c05b8ebe59eca226d9eaef2760cecca9d50944a | ||
| + tests: add .info() method to objects returned by our Mockup handler | ||
| + This is required to completely mimic urllib, and have successful | ||
| + response.headers attribute accesses. | ||
| + | ||
| + Applied partial patch for Issue 113 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=113 | ||
| + | ||
| + The partial bit means we changed the parameter from "page" to "cursor" | ||
| + so the call would work. What was left out was a more direct way | ||
| + to return the cursor value *after* the call and also in the patch | ||
| + they also changed the method to return an iterator. | ||
| + | ||
| +2010-05-17 | ||
| + | ||
| + Issue 50 http://code.google.com/p/python-twitter/issues/detail?id=50 | ||
| + Applied patch by wheaties.box that implements a new method to return | ||
| + the Rate Limit Status and also adds the new method MaximumHitFrequency | ||
| + | ||
| + Multiple typo, indent and whitespace tweaks | ||
| + | ||
| + Issue 60 http://code.google.com/p/python-twitter/issues/detail?id=60 | ||
| + Pulled out new GetFavorites and GetMentions methods from the patch | ||
| + submitted by joegermuska | ||
| + | ||
| + Issue 62 http://code.google.com/p/python-twitter/issues/detail?id=62 | ||
| + Applied patch from lukev123 that adds gzip compression to the GET | ||
| + requests sent to Twitter. The patch was modified to default gzip to | ||
| + False and to allow the twitter.API class instantiation to set the | ||
| + value to True. This was done to not change current default | ||
| + behaviour radically. | ||
| + | ||
| + Issue 80 http://code.google.com/p/python-twitter/issues/detail?id=80 | ||
| + Fixed PostUpdate() call example in the README | ||
| + | ||
| +2010-05-16 | ||
| + | ||
| + Issue 19 http://code.google.com/p/python-twitter/issues/detail?id=19 | ||
| + TinyURL example and the idea for this comes from a bug filed by | ||
| + acolorado with patch provided by ghills. | ||
| + | ||
| + Issue 37 http://code.google.com/p/python-twitter/issues/detail?id=37 | ||
| + Added base_url to the twitter.API class init call to allow the user | ||
| + to override the default https://twitter.com base. Since Twitter now | ||
| + supports https for all calls I (bear) changed the patch to default to | ||
| + https instead of http. | ||
| + Original issue by kotecha.ravi, patch by wiennat and with implementation | ||
| + tweaks by bear. | ||
| + | ||
| + Issue 45 http://code.google.com/p/python-twitter/issues/detail?id=45 | ||
| + Two grammar fixes for relative_created_at property | ||
| + Patches by thomasdyson and chris.boardman07 | ||
| + | ||
| +2010-01-24 | ||
| + | ||
| + Applying patch submitted to fix Issue 70 | ||
| + http://code.google.com/p/python-twitter/issues/detail?id=70 | ||
| + | ||
| + The patch was originally submitted by user ghills, adapted by livibetter and | ||
| + adapted even further by JimMoefoe (read the comments for the full details :) ) | ||
| + | ||
| + Applying patch submitted by markus.magnuson to add new method GetFriendIDs | ||
| + Issue 94 http://code.google.com/p/python-twitter/issues/detail?id=94 | ||
| + | ||
| +2009-06-13 | ||
| + | ||
| + Releasing 0.6 to help people avoid the Twitpocalypse. | ||
| + | ||
| +2009-05-03 | ||
| + | ||
| + Support hashlib in addition to the older md5 library. | ||
| + | ||
| +2009-03-11 | ||
| + | ||
| + Added page parameter to GetReplies, GetFriends, GetFollowers, and GetDirectMessages | ||
| + | ||
| +2009-03-03 | ||
| + | ||
| + Added count parameter to GetFriendsTimeline | ||
| + | ||
| +2009-03-01 | ||
| + Add PostUpdates, which automatically splits long text into multiple updates. | ||
| + | ||
| +2009-02-25 | ||
| + | ||
| + Add in_reply_to_status_id to api.PostUpdate | ||
| + | ||
| +2009-02-21 | ||
| + | ||
| + Wrap any error responses in a TwitterError | ||
| + Add since_id to GetFriendsTimeline and GetUserTimeline | ||
| + | ||
| +2009-02-20 | ||
| + | ||
| + Added since and since_id to Api.GetReplies | ||
| + | ||
| +2008-07-10 | ||
| + | ||
| + Added new properties to User and Status classes. | ||
| + Removed spurious self-import of the twitter module | ||
| + Added a NOTICE file | ||
| + Require simplejson 2.x or later | ||
| + Added get/create/destroy favorite flags for status messages. | ||
| + Bug fix for non-tty devices. | ||
| + | ||
| +2007-09-13 | ||
| + | ||
| + Unset the executable bit on README. | ||
| + | ||
| +2007-09-13 | ||
| + | ||
| + Released version 0.5. | ||
| + Added back support for setuptools (conditionally) | ||
| + Added support for X-Twitter-* HTTP headers | ||
| + Fixed the tests to work across all timezones | ||
| + Removed the 140 character limit from PostUpdate | ||
| + Added support for per-user tmp cache directories | ||
| + | ||
| +2007-06-13 | ||
| + | ||
| + Released 0.4. | ||
| + Fixed a unicode error that prevented tweet.py from working. | ||
| + Added DestroyStatus | ||
| + Added DestroyDirectMessage | ||
| + Added CreateFriendship | ||
| + Added DestroyFriendship | ||
| + | ||
| +2007-06-03 | ||
| + | ||
| + Fixed the bug that prevented unicode strings being posted | ||
| + Username and password now set on twitter.Api, not individual method calls | ||
| + Added SetCredentials and ClearCredentials | ||
| + Added GetUser ("users/show" in the twitter web api) | ||
| + Added GetFeatured | ||
| + Added GetDirectMessages | ||
| + Added GetStatus ("statuses/show" in the twitter web api) | ||
| + Added GetReplies | ||
| + Added optional since_id parameter on GetPublicTimeline | ||
| + Added optional since parameter on GetUserTimeline | ||
| + Added optional since and user parameters on GetFriendsTimeline | ||
| + Added optional user parameter on GetFriends | ||
| + | ||
| +2007-04-27 | ||
| + | ||
| + Modified examples/twitter-to-xhtml.py to handle unicode | ||
| + Dropped dependency on setuptools (too complicated/buggy) | ||
| + Added unicode test cases | ||
| + Fixed issue 2 "Rename needs an unlink in front" | ||
| + | ||
| +2007-04-02 | ||
| + | ||
| + Released 0.3. | ||
| + Use gmtime not localtime to calculate relative_created_at. | ||
| + | ||
| +2007-03-26 | ||
| + | ||
| + Released 0.2 | ||
| + GetUserTimeline can accept userid or username. | ||
| + | ||
| +2007-03-21 | ||
| + | ||
| + Calculate relative_created_at on the fly | ||
| + | ||
| +2007-01-28 | ||
| + | ||
| + Released 0.1 | ||
| + Initial checkin of python-twitter | ||
| + |
13
COPYING
| @@ -0,0 +1,13 @@ | ||
| + Copyright 2007 The Python-Twitter Developers | ||
| + | ||
| + Licensed under the Apache License, Version 2.0 (the "License"); | ||
| + you may not use this file except in compliance with the License. | ||
| + You may obtain a copy of the License at | ||
| + | ||
| + http://www.apache.org/licenses/LICENSE-2.0 | ||
| + | ||
| + Unless required by applicable law or agreed to in writing, software | ||
| + distributed under the License is distributed on an "AS IS" BASIS, | ||
| + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| + See the License for the specific language governing permissions and | ||
| + limitations under the License. |
202
LICENSE
| @@ -0,0 +1,202 @@ | ||
| + | ||
| + Apache License | ||
| + Version 2.0, January 2004 | ||
| + http://www.apache.org/licenses/ | ||
| + | ||
| + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | ||
| + | ||
| + 1. Definitions. | ||
| + | ||
| + "License" shall mean the terms and conditions for use, reproduction, | ||
| + and distribution as defined by Sections 1 through 9 of this document. | ||
| + | ||
| + "Licensor" shall mean the copyright owner or entity authorized by | ||
| + the copyright owner that is granting the License. | ||
| + | ||
| + "Legal Entity" shall mean the union of the acting entity and all | ||
| + other entities that control, are controlled by, or are under common | ||
| + control with that entity. For the purposes of this definition, | ||
| + "control" means (i) the power, direct or indirect, to cause the | ||
| + direction or management of such entity, whether by contract or | ||
| + otherwise, or (ii) ownership of fifty percent (50%) or more of the | ||
| + outstanding shares, or (iii) beneficial ownership of such entity. | ||
| + | ||
| + "You" (or "Your") shall mean an individual or Legal Entity | ||
| + exercising permissions granted by this License. | ||
| + | ||
| + "Source" form shall mean the preferred form for making modifications, | ||
| + including but not limited to software source code, documentation | ||
| + source, and configuration files. | ||
| + | ||
| + "Object" form shall mean any form resulting from mechanical | ||
| + transformation or translation of a Source form, including but | ||
| + not limited to compiled object code, generated documentation, | ||
| + and conversions to other media types. | ||
| + | ||
| + "Work" shall mean the work of authorship, whether in Source or | ||
| + Object form, made available under the License, as indicated by a | ||
| + copyright notice that is included in or attached to the work | ||
| + (an example is provided in the Appendix below). | ||
| + | ||
| + "Derivative Works" shall mean any work, whether in Source or Object | ||
| + form, that is based on (or derived from) the Work and for which the | ||
| + editorial revisions, annotations, elaborations, or other modifications | ||
| + represent, as a whole, an original work of authorship. For the purposes | ||
| + of this License, Derivative Works shall not include works that remain | ||
| + separable from, or merely link (or bind by name) to the interfaces of, | ||
| + the Work and Derivative Works thereof. | ||
| + | ||
| + "Contribution" shall mean any work of authorship, including | ||
| + the original version of the Work and any modifications or additions | ||
| + to that Work or Derivative Works thereof, that is intentionally | ||
| + submitted to Licensor for inclusion in the Work by the copyright owner | ||
| + or by an individual or Legal Entity authorized to submit on behalf of | ||
| + the copyright owner. For the purposes of this definition, "submitted" | ||
| + means any form of electronic, verbal, or written communication sent | ||
| + to the Licensor or its representatives, including but not limited to | ||
| + communication on electronic mailing lists, source code control systems, | ||
| + and issue tracking systems that are managed by, or on behalf of, the | ||
| + Licensor for the purpose of discussing and improving the Work, but | ||
| + excluding communication that is conspicuously marked or otherwise | ||
| + designated in writing by the copyright owner as "Not a Contribution." | ||
| + | ||
| + "Contributor" shall mean Licensor and any individual or Legal Entity | ||
| + on behalf of whom a Contribution has been received by Licensor and | ||
| + subsequently incorporated within the Work. | ||
| + | ||
| + 2. Grant of Copyright License. Subject to the terms and conditions of | ||
| + this License, each Contributor hereby grants to You a perpetual, | ||
| + worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||
| + copyright license to reproduce, prepare Derivative Works of, | ||
| + publicly display, publicly perform, sublicense, and distribute the | ||
| + Work and such Derivative Works in Source or Object form. | ||
| + | ||
| + 3. Grant of Patent License. Subject to the terms and conditions of | ||
| + this License, each Contributor hereby grants to You a perpetual, | ||
| + worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||
| + (except as stated in this section) patent license to make, have made, | ||
| + use, offer to sell, sell, import, and otherwise transfer the Work, | ||
| + where such license applies only to those patent claims licensable | ||
| + by such Contributor that are necessarily infringed by their | ||
| + Contribution(s) alone or by combination of their Contribution(s) | ||
| + with the Work to which such Contribution(s) was submitted. If You | ||
| + institute patent litigation against any entity (including a | ||
| + cross-claim or counterclaim in a lawsuit) alleging that the Work | ||
| + or a Contribution incorporated within the Work constitutes direct | ||
| + or contributory patent infringement, then any patent licenses | ||
| + granted to You under this License for that Work shall terminate | ||
| + as of the date such litigation is filed. | ||
| + | ||
| + 4. Redistribution. You may reproduce and distribute copies of the | ||
| + Work or Derivative Works thereof in any medium, with or without | ||
| + modifications, and in Source or Object form, provided that You | ||
| + meet the following conditions: | ||
| + | ||
| + (a) You must give any other recipients of the Work or | ||
| + Derivative Works a copy of this License; and | ||
| + | ||
| + (b) You must cause any modified files to carry prominent notices | ||
| + stating that You changed the files; and | ||
| + | ||
| + (c) You must retain, in the Source form of any Derivative Works | ||
| + that You distribute, all copyright, patent, trademark, and | ||
| + attribution notices from the Source form of the Work, | ||
| + excluding those notices that do not pertain to any part of | ||
| + the Derivative Works; and | ||
| + | ||
| + (d) If the Work includes a "NOTICE" text file as part of its | ||
| + distribution, then any Derivative Works that You distribute must | ||
| + include a readable copy of the attribution notices contained | ||
| + within such NOTICE file, excluding those notices that do not | ||
| + pertain to any part of the Derivative Works, in at least one | ||
| + of the following places: within a NOTICE text file distributed | ||
| + as part of the Derivative Works; within the Source form or | ||
| + documentation, if provided along with the Derivative Works; or, | ||
| + within a display generated by the Derivative Works, if and | ||
| + wherever such third-party notices normally appear. The contents | ||
| + of the NOTICE file are for informational purposes only and | ||
| + do not modify the License. You may add Your own attribution | ||
| + notices within Derivative Works that You distribute, alongside | ||
| + or as an addendum to the NOTICE text from the Work, provided | ||
| + that such additional attribution notices cannot be construed | ||
| + as modifying the License. | ||
| + | ||
| + You may add Your own copyright statement to Your modifications and | ||
| + may provide additional or different license terms and conditions | ||
| + for use, reproduction, or distribution of Your modifications, or | ||
| + for any such Derivative Works as a whole, provided Your use, | ||
| + reproduction, and distribution of the Work otherwise complies with | ||
| + the conditions stated in this License. | ||
| + | ||
| + 5. Submission of Contributions. Unless You explicitly state otherwise, | ||
| + any Contribution intentionally submitted for inclusion in the Work | ||
| + by You to the Licensor shall be under the terms and conditions of | ||
| + this License, without any additional terms or conditions. | ||
| + Notwithstanding the above, nothing herein shall supersede or modify | ||
| + the terms of any separate license agreement you may have executed | ||
| + with Licensor regarding such Contributions. | ||
| + | ||
| + 6. Trademarks. This License does not grant permission to use the trade | ||
| + names, trademarks, service marks, or product names of the Licensor, | ||
| + except as required for reasonable and customary use in describing the | ||
| + origin of the Work and reproducing the content of the NOTICE file. | ||
| + | ||
| + 7. Disclaimer of Warranty. Unless required by applicable law or | ||
| + agreed to in writing, Licensor provides the Work (and each | ||
| + Contributor provides its Contributions) on an "AS IS" BASIS, | ||
| + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | ||
| + implied, including, without limitation, any warranties or conditions | ||
| + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | ||
| + PARTICULAR PURPOSE. You are solely responsible for determining the | ||
| + appropriateness of using or redistributing the Work and assume any | ||
| + risks associated with Your exercise of permissions under this License. | ||
| + | ||
| + 8. Limitation of Liability. In no event and under no legal theory, | ||
| + whether in tort (including negligence), contract, or otherwise, | ||
| + unless required by applicable law (such as deliberate and grossly | ||
| + negligent acts) or agreed to in writing, shall any Contributor be | ||
| + liable to You for damages, including any direct, indirect, special, | ||
| + incidental, or consequential damages of any character arising as a | ||
| + result of this License or out of the use or inability to use the | ||
| + Work (including but not limited to damages for loss of goodwill, | ||
| + work stoppage, computer failure or malfunction, or any and all | ||
| + other commercial damages or losses), even if such Contributor | ||
| + has been advised of the possibility of such damages. | ||
| + | ||
| + 9. Accepting Warranty or Additional Liability. While redistributing | ||
| + the Work or Derivative Works thereof, You may choose to offer, | ||
| + and charge a fee for, acceptance of support, warranty, indemnity, | ||
| + or other liability obligations and/or rights consistent with this | ||
| + License. However, in accepting such obligations, You may act only | ||
| + on Your own behalf and on Your sole responsibility, not on behalf | ||
| + of any other Contributor, and only if You agree to indemnify, | ||
| + defend, and hold each Contributor harmless for any liability | ||
| + incurred by, or claims asserted against, such Contributor by reason | ||
| + of your accepting any such warranty or additional liability. | ||
| + | ||
| + END OF TERMS AND CONDITIONS | ||
| + | ||
| + APPENDIX: How to apply the Apache License to your work. | ||
| + | ||
| + To apply the Apache License to your work, attach the following | ||
| + boilerplate notice, with the fields enclosed by brackets "[]" | ||
| + replaced with your own identifying information. (Don't include | ||
| + the brackets!) The text should be enclosed in the appropriate | ||
| + comment syntax for the file format. We also recommend that a | ||
| + file or class name and description of purpose be included on the | ||
| + same "printed page" as the copyright notice for easier | ||
| + identification within third-party archives. | ||
| + | ||
| + Copyright [yyyy] [name of copyright owner] | ||
| + | ||
| + Licensed under the Apache License, Version 2.0 (the "License"); | ||
| + you may not use this file except in compliance with the License. | ||
| + You may obtain a copy of the License at | ||
| + | ||
| + http://www.apache.org/licenses/LICENSE-2.0 | ||
| + | ||
| + Unless required by applicable law or agreed to in writing, software | ||
| + distributed under the License is distributed on an "AS IS" BASIS, | ||
| + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| + See the License for the specific language governing permissions and | ||
| + limitations under the License. |
9
MANIFEST.in
| @@ -0,0 +1,9 @@ | ||
| +include CHANGES | ||
| +include COPYING | ||
| +include LICENSE | ||
| +include NOTICE | ||
| +include README | ||
| +include *.py | ||
| +recursive-include examples *.py | ||
| +recursive-include doc *.html | ||
| +prune .DS_Store |
13
NOTICE
| @@ -0,0 +1,13 @@ | ||
| +NOTICE | ||
| + | ||
| +The simplejson library (http://simplejson.googlecode.com) is used under the terms of the MIT license and is copyright Bob Ippolito. | ||
| +See http://simplejson.googlecode.com/svn/trunk/LICENSE.txt for details. | ||
| + | ||
| +The python-oauth2 library (http://github.com/simplegeo/python-oauth2) is used under the terms of the MIT license and is copyright Leah Culver. | ||
| +See http://github.com/simplegeo/python-oauth2/blob/master/LICENSE.txt for details. | ||
| + | ||
| +The httplib2 library (http://code.google.com/p/httplib2) is used under the terms of the MIT license and is copyright Joe Gregorio. | ||
| +See http://code.google.com/p/httplib2/source/browse/python2/httplib2/__init__.py for details. | ||
| + | ||
| +This code is made available under the Apache License and is copyright the Python-Twitter Developers. | ||
| + |
208
README.md
| @@ -1,2 +1,206 @@ | ||
| -python-twitter | ||
| -============== | ||
| +=Python Twitter= | ||
| + | ||
| +_A Python wrapper around the Twitter API_ | ||
| + | ||
| +Author: `The Python-Twitter Developers <[email protected]>` | ||
| + | ||
| +==Introduction== | ||
| + | ||
| +This library provides a pure Python interface for the Twitter API. | ||
| + | ||
| +Twitter (http://twitter.com) provides a service that allows people to | ||
| +connect via the web, IM, and SMS. Twitter exposes a web services API | ||
| +(http://dev.twitter.com/doc) and this library is intended to make | ||
| +it even easier for Python programmers to use. | ||
| + | ||
| +==Building== | ||
| + | ||
| +*From source:* | ||
| + | ||
| +Install the dependencies: | ||
| + | ||
| + SimpleJson | ||
| + http://cheeseshop.python.org/pypi/simplejson | ||
| + | ||
| + SimpleGeo's OAuth2 | ||
| + http://github.com/simplegeo/python-oauth2 or | ||
| + http://pypi.python.org/pypi/oauth2 | ||
| + | ||
| + HTTPLib2 (installed along with oauth2 if you use setuptools) | ||
| + http://code.google.com/p/httplib2/ | ||
| + | ||
| +Download the latest python-twitter library from: | ||
| + | ||
| + http://code.google.com/p/python-twitter/ | ||
| + | ||
| +Extract the source distribution and run: | ||
| + | ||
| +{{{ | ||
| + $ python setup.py build | ||
| + $ python setup.py install | ||
| +}}} | ||
| + | ||
| +*Testing* | ||
| + | ||
| +With setuptools installed: | ||
| + | ||
| +{{{ | ||
| + $ python setup.py test | ||
| +}}} | ||
| + | ||
| +Without setuptools installed: | ||
| + | ||
| +{{{ | ||
| + $ python twitter_test.py | ||
| +}}} | ||
| + | ||
| +==Getting the code== | ||
| + | ||
| +View the trunk at: | ||
| + | ||
| + http://code.google.com/p/python-twitter/source/ | ||
| + | ||
| +Check out the latest development version anonymously with: | ||
| + | ||
| +{{{ | ||
| + $ hg clone http://python-twitter.googlecode.com/hg/ python-twitter | ||
| + $ cd python-twitter | ||
| + $ hg update dev | ||
| +}}} | ||
| + | ||
| +==Documentation== | ||
| + | ||
| +View the last release API documentation at: | ||
| + | ||
| + http://dev.twitter.com/doc | ||
| + | ||
| +==Using== | ||
| + | ||
| +The library provides a Python wrapper around the Twitter API and | ||
| +the Twitter data model. | ||
| + | ||
| +*Model:* | ||
| + | ||
| +The three model classes are twitter.Status, twitter.User, and | ||
| +twitter.DirectMessage. The API methods return instances of these | ||
| +classes. | ||
| + | ||
| +To read the full API for twitter.Status, twitter.User, or | ||
| +twitter.DirectMessage, run: | ||
| + | ||
| +{{{ | ||
| + $ pydoc twitter.Status | ||
| + $ pydoc twitter.User | ||
| + $ pydoc twitter.DirectMessage | ||
| +}}} | ||
| + | ||
| +*API:* | ||
| + | ||
| +The API is exposed via the twitter.Api class. | ||
| + | ||
| +To create an instance of the twitter.Api class: | ||
| + | ||
| +{{{ | ||
| + >>> import twitter | ||
| + >>> api = twitter.Api() | ||
| +}}} | ||
| + | ||
| +To create an instance of the twitter.Api with login credentials (many API | ||
| +calls require the client to be authenticated). | ||
| + | ||
| +The python-twitter library now only supports OAuth authentication as the | ||
| +Twitter devs have indicated that OAuth is the only method that will be | ||
| +supported moving forward. | ||
| + | ||
| + >>> api = twitter.Api(consumer_key='consumer_key', | ||
| + consumer_secret='consumer_secret', | ||
| + access_token_key='access_token', | ||
| + access_token_secret='access_token_secret') | ||
| + | ||
| +To see if your credentials are successful: | ||
| + NOTE - much more than the small sample given here will print | ||
| + | ||
| + >>> print api.VerifyCredentials() | ||
| + {"id": 16133, "location": "Philadelphia", "name": "bear"} | ||
| + | ||
| +To fetch the most recently posted public Twitter status messages: | ||
| + | ||
| +{{{ | ||
| + >>> statuses = api.GetPublicTimeline() | ||
| + >>> print [s.user.name for s in statuses] | ||
| + [u'DeWitt', u'Kesuke Miyagi', u'ev', u'Buzz Andersen', u'Biz Stone'] | ||
| +}}} | ||
| + | ||
| +To fetch a single user's public status messages, where "user" is either | ||
| +a Twitter "short name" or their user id. | ||
| + | ||
| +{{{ | ||
| + >>> statuses = api.GetUserTimeline(user) | ||
| + >>> print [s.text for s in statuses] | ||
| +}}} | ||
| + | ||
| +To fetch a list of a user's friends (requires authentication): | ||
| + | ||
| +{{{ | ||
| + >>> users = api.GetFriends() | ||
| + >>> print [u.name for u in users] | ||
| +}}} | ||
| + | ||
| +To post a Twitter status message (requires authentication): | ||
| + | ||
| +{{{ | ||
| + >>> status = api.PostUpdate('I love python-twitter!') | ||
| + >>> print status.text | ||
| + I love python-twitter! | ||
| +}}} | ||
| + | ||
| +There are many more API methods, to read the full API documentation: | ||
| + | ||
| +{{{ | ||
| + $ pydoc twitter.Api | ||
| +}}} | ||
| + | ||
| +==Todo== | ||
| + | ||
| +Patches and bug reports are welcome, just please keep the style | ||
| +consistent with the original source. | ||
| + | ||
| +Add more example scripts. | ||
| + | ||
| +The twitter.Status and twitter.User classes are going to be hard | ||
| +to keep in sync with the API if the API changes. More of the | ||
| +code could probably be written with introspection. | ||
| + | ||
| +Statement coverage of twitter_test is only about 80% of twitter.py. | ||
| + | ||
| +The twitter.Status and twitter.User classes could perform more | ||
| +validation on the property setters. | ||
| + | ||
| +==More Information== | ||
| + | ||
| +Please visit http://groups.google.com/group/python-twitter for more discussion. | ||
| + | ||
| +==Contributors== | ||
| + | ||
| +Additional thanks to Pierre-Jean Coudert, Omar Kilani, Jodok Batlogg, | ||
| +edleaf, glen.tregoning, Brad Choate, Jim Cortez, Jason Lemoine, Thomas | ||
| +Dyson, Robert Laquey, Hameedullah Khan, Mike Taylor, DeWitt Clinton, | ||
| +and the rest of the python-twitter mailing list. | ||
| + | ||
| +==License== | ||
| + | ||
| +{{{ | ||
| + Copyright 2007 The Python-Twitter Developers | ||
| + | ||
| + Licensed under the Apache License, Version 2.0 (the 'License'); | ||
| + you may not use this file except in compliance with the License. | ||
| + You may obtain a copy of the License at | ||
| + | ||
| + http://www.apache.org/licenses/LICENSE-2.0 | ||
| + | ||
| + Unless required by applicable law or agreed to in writing, software | ||
| + distributed under the License is distributed on an 'AS IS' BASIS, | ||
| + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| + See the License for the specific language governing permissions and | ||
| + limitations under the License. | ||
| +}}} |
2,026
doc/twitter.html
2,026 additions,
0 deletions
not shown because the diff is too large. Please use a local Git client to view these changes.
71
examples/shorten_url.py
| @@ -0,0 +1,71 @@ | ||
| +#!/usr/bin/python2.4 | ||
| +# | ||
| +# Copyright 2007 The Python-Twitter Developers | ||
| +# | ||
| +# Licensed under the Apache License, Version 2.0 (the "License"); | ||
| +# you may not use this file except in compliance with the License. | ||
| +# You may obtain a copy of the License at | ||
| +# | ||
| +# http://www.apache.org/licenses/LICENSE-2.0 | ||
| +# | ||
| +# Unless required by applicable law or agreed to in writing, software | ||
| +# distributed under the License is distributed on an "AS IS" BASIS, | ||
| +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| +# See the License for the specific language governing permissions and | ||
| +# limitations under the License. | ||
| + | ||
| +'''A class that defines the default URL Shortener. | ||
| + | ||
| +TinyURL is provided as the default and as an example. | ||
| +''' | ||
| + | ||
| +import urllib | ||
| + | ||
| + | ||
| + # Change History | ||
| + # | ||
| + # 2010-05-16 | ||
| + # TinyURL example and the idea for this comes from a bug filed by | ||
| + # acolorado with patch provided by ghills. Class implementation | ||
| + # was done by bear. | ||
| + # | ||
| + # Issue 19 http://code.google.com/p/python-twitter/issues/detail?id=19 | ||
| + # | ||
| + | ||
| + | ||
class ShortenURL(object):
  '''Helper class to make URL Shortener calls if/when required.

  TinyURL is used as the default/example service; subclass or replace
  Shorten() to target a different shortener.
  '''
  def __init__(self,
               userid=None,
               password=None):
    '''Instantiate a new ShortenURL object

    Args:
      userid: userid for any required authorization call [optional]
      password: password for any required authorization call [optional]
    '''
    self.userid = userid
    self.password = password

  def Shorten(self,
              longURL):
    '''Call TinyURL API and returned shortened URL result

    Args:
      longURL: URL string to shorten

    Returns:
      The shortened URL as a string

    Note:
      longURL is required and no checks are made to ensure completeness
    '''
    result = None
    # Bug fix: longURL was previously interpolated unescaped, so any '&',
    # '#', or '?' in the target URL corrupted the TinyURL request.
    # quote() with safe='' percent-encodes every reserved character.
    f = urllib.urlopen("http://tinyurl.com/api-create.php?url=%s"
                       % urllib.quote(longURL, safe=''))
    try:
      result = f.read()
    finally:
      # Always release the network handle, even if the read fails.
      f.close()

    return result
141
examples/tweet.py
| @@ -0,0 +1,141 @@ | ||
| +#!/usr/bin/python2.4 | ||
| + | ||
| +'''Post a message to twitter''' | ||
| + | ||
| +__author__ = '[email protected]' | ||
| + | ||
| +import ConfigParser | ||
| +import getopt | ||
| +import os | ||
| +import sys | ||
| +import twitter | ||
| + | ||
| + | ||
# Help text shown by PrintUsageAndExit(); doubles as the command-line
# documentation for this script.  (Fixes: "enviroment" typo, the sample
# rc file has four lines not three, and the placeholder names now match
# the keys they stand in for.)
USAGE = '''Usage: tweet [options] message

  This script posts a message to Twitter.

  Options:

    -h --help : print this help
    --consumer-key : the twitter consumer key
    --consumer-secret : the twitter consumer secret
    --access-key : the twitter access token key
    --access-secret : the twitter access token secret
    --encoding : the character set encoding used in input strings, e.g. "utf-8". [optional]

  Documentation:

  If either of the command line flags are not present, the environment
  variables TWEETUSERNAME and TWEETPASSWORD will then be checked for your
  consumer_key or consumer_secret, respectively.

  If neither the command line flags nor the environment variables are
  present, the .tweetrc file, if it exists, can be used to set the
  default consumer_key and consumer_secret.  The file should contain the
  following lines, replacing *consumer_key* with your consumer key, and
  *consumer_secret* with your consumer secret:

  A skeletal .tweetrc file:

  [Tweet]
  consumer_key: *consumer_key*
  consumer_secret: *consumer_secret*
  access_key: *access_key*
  access_secret: *access_secret*

'''
| + | ||
def PrintUsageAndExit():
  """Print the usage text and terminate the process with exit status 2."""
  print(USAGE)
  sys.exit(2)
| + | ||
def GetConsumerKeyEnv():
  """Consumer key from the TWEETUSERNAME environment variable, or None."""
  return os.environ.get("TWEETUSERNAME")

def GetConsumerSecretEnv():
  """Consumer secret from the TWEETPASSWORD environment variable, or None."""
  return os.environ.get("TWEETPASSWORD")

def GetAccessKeyEnv():
  """Access token key from the TWEETACCESSKEY environment variable, or None."""
  return os.environ.get("TWEETACCESSKEY")

def GetAccessSecretEnv():
  """Access token secret from the TWEETACCESSSECRET environment variable, or None."""
  return os.environ.get("TWEETACCESSSECRET")
| + | ||
class TweetRc(object):
  """Reads default OAuth credentials from the user's ~/.tweetrc file.

  The file is parsed lazily on first access and cached afterwards.
  Expected format:

    [Tweet]
    consumer_key: ...
    consumer_secret: ...
    access_key: ...
    access_secret: ...
  """
  def __init__(self):
    # Cached ConfigParser instance, built on first _GetOption() call.
    self._config = None

  def GetConsumerKey(self):
    """Return the consumer_key option, or None if unavailable."""
    return self._GetOption('consumer_key')

  def GetConsumerSecret(self):
    """Return the consumer_secret option, or None if unavailable."""
    return self._GetOption('consumer_secret')

  def GetAccessKey(self):
    """Return the access_key option, or None if unavailable."""
    return self._GetOption('access_key')

  def GetAccessSecret(self):
    """Return the access_secret option, or None if unavailable."""
    return self._GetOption('access_secret')

  def _GetOption(self, option):
    """Return the named option from the [Tweet] section, or None.

    Only the expected missing-section/missing-option errors are
    swallowed (the original bare `except:` hid every failure, including
    genuine bugs such as a malformed config file or typos in this code).
    """
    try:
      return self._GetConfig().get('Tweet', option)
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
      return None

  def _GetConfig(self):
    """Parse ~/.tweetrc on first use and cache the parser."""
    if not self._config:
      self._config = ConfigParser.ConfigParser()
      # read() silently ignores a missing file; the later .get() then
      # raises NoSectionError, which _GetOption() maps to None.
      self._config.read(os.path.expanduser('~/.tweetrc'))
    return self._config
| + | ||
| +def main(): | ||
| + try: | ||
| + shortflags = 'h' | ||
| + longflags = ['help', 'consumer-key=', 'consumer-secret=', | ||
| + 'access-key=', 'access-secret=', 'encoding='] | ||
| + opts, args = getopt.gnu_getopt(sys.argv[1:], shortflags, longflags) | ||
| + except getopt.GetoptError: | ||
| + PrintUsageAndExit() | ||
| + consumer_keyflag = None | ||
| + consumer_secretflag = None | ||
| + access_keyflag = None | ||
| + access_secretflag = None | ||
| + encoding = None | ||
| + for o, a in opts: | ||
| + if o in ("-h", "--help"): | ||
| + PrintUsageAndExit() | ||
| + if o in ("--consumer-key"): | ||
| + consumer_keyflag = a | ||
| + if o in ("--consumer-secret"): | ||
| + consumer_secretflag = a | ||
| + if o in ("--access-key"): | ||
| + access_keyflag = a | ||
| + if o in ("--access-secret"): | ||
| + access_secretflag = a | ||
| + if o in ("--encoding"): | ||
| + encoding = a | ||
| + message = ' '.join(args) | ||
| + if not message: | ||
| + PrintUsageAndExit() | ||
| + rc = TweetRc() | ||
| + consumer_key = consumer_keyflag or GetConsumerKeyEnv() or rc.GetConsumerKey() | ||
| + consumer_secret = consumer_secretflag or GetConsumerSecretEnv() or rc.GetConsumerSecret() | ||
| + access_key = access_keyflag or GetAccessKeyEnv() or rc.GetAccessKey() | ||
| + access_secret = access_secretflag or GetAccessSecretEnv() or rc.GetAccessSecret() | ||
| + if not consumer_key or not consumer_secret or not access_key or not access_secret: | ||
| + PrintUsageAndExit() | ||
| + api = twitter.Api(consumer_key=consumer_key, consumer_secret=consumer_secret, | ||
| + access_token_key=access_key, access_token_secret=access_secret, | ||
| + input_encoding=encoding) | ||
| + try: | ||
| + status = api.PostUpdate(message) | ||
| + except UnicodeDecodeError: | ||
| + print "Your message could not be encoded. Perhaps it contains non-ASCII characters? " | ||
| + print "Try explicitly specifying the encoding with the --encoding flag" | ||
| + sys.exit(2) | ||
| + print "%s just posted: %s" % (status.user.name, status.text) | ||
| + | ||
| +if __name__ == "__main__": | ||
| + main() |
69
examples/twitter-to-xhtml.py
| @@ -0,0 +1,69 @@ | ||
| +#!/usr/bin/python2.4 | ||
| + | ||
| +'''Load the latest update for a Twitter user and leave it in an XHTML fragment''' | ||
| + | ||
| +__author__ = '[email protected]' | ||
| + | ||
| +import codecs | ||
| +import getopt | ||
| +import sys | ||
| +import twitter | ||
| + | ||
# XHTML fragment emitted for a single status.  Filled by FetchTwitter()
# with (screen_name, text, screen_name, status id, relative_created_at),
# in that order.
TEMPLATE = """
<div class="twitter">
  <span class="twitter-user"><a href="http://twitter.com/%s">Twitter</a>: </span>
  <span class="twitter-text">%s</span>
  <span class="twitter-relative-created-at"><a href="http://twitter.com/%s/statuses/%s">Posted %s</a></span>
</div>
"""
| + | ||
def Usage():
  """Print command-line help for this script to stdout."""
  print('Usage: %s [options] twitterid' % __file__)
  print('  This script fetches a users latest twitter update and stores')
  print('  the result in a file as an XHTML fragment')
  print('  Options:')
  print('    --help -h : print this help')
  print('    --output : the output file [default: stdout]')
| + | ||
| + | ||
def FetchTwitter(user, output):
  """Fetch `user`'s most recent status and render it with TEMPLATE.

  Writes the XHTML fragment to the `output` file when given, otherwise
  prints it to stdout.
  """
  assert user
  latest = twitter.Api().GetUserTimeline(user=user, count=1)[0]
  xhtml = TEMPLATE % (latest.user.screen_name, latest.text,
                      latest.user.screen_name, latest.id,
                      latest.relative_created_at)
  if output:
    Save(xhtml, output)
  else:
    print(xhtml)
| + | ||
| + | ||
def Save(xhtml, output):
  """Write `xhtml` to the file named `output` as ASCII.

  Non-ASCII characters are emitted as XML character references
  (xmlcharrefreplace), so the result is safe to embed in any XHTML
  document regardless of its encoding.

  Bug fix: the handle is now closed in a finally block, so it is
  released even if the write raises (previously it leaked on error).
  """
  out = codecs.open(output, mode='w', encoding='ascii',
                    errors='xmlcharrefreplace')
  try:
    out.write(xhtml)
  finally:
    out.close()
| + | ||
def main():
  """Parse arguments and fetch the requested user's latest status.

  Exits with status 2 on usage errors or when no twitterid is given.
  """
  try:
    # Bug fix: the short-option spec was 'ho', which declares -o as a
    # bare flag; the trailing ':' is required for -o to take the output
    # filename as its argument (matching the 'output=' long option).
    opts, args = getopt.gnu_getopt(sys.argv[1:], 'ho:', ['help', 'output='])
  except getopt.GetoptError:
    Usage()
    sys.exit(2)
  try:
    user = args[0]
  except IndexError:
    # Narrowed from a bare except: only "no positional argument" should
    # be treated as a usage error here.
    Usage()
    sys.exit(2)
  output = None
  for o, a in opts:
    if o in ("-h", "--help"):
      Usage()
      sys.exit(2)
    if o in ("-o", "--output"):
      output = a
  FetchTwitter(user, output)

if __name__ == "__main__":
  main()
91
get_access_token.py
| @@ -0,0 +1,91 @@ | ||
| +#!/usr/bin/python2.4 | ||
| +# | ||
| +# Copyright 2007 The Python-Twitter Developers | ||
| +# | ||
| +# Licensed under the Apache License, Version 2.0 (the "License"); | ||
| +# you may not use this file except in compliance with the License. | ||
| +# You may obtain a copy of the License at | ||
| +# | ||
| +# http://www.apache.org/licenses/LICENSE-2.0 | ||
| +# | ||
| +# Unless required by applicable law or agreed to in writing, software | ||
| +# distributed under the License is distributed on an "AS IS" BASIS, | ||
| +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| +# See the License for the specific language governing permissions and | ||
| +# limitations under the License. | ||
| + | ||
| + | ||
| +import os | ||
| +import sys | ||
| + | ||
| +# parse_qsl moved to urlparse module in v2.6 | ||
| +try: | ||
| + from urlparse import parse_qsl | ||
| +except: | ||
| + from cgi import parse_qsl | ||
| + | ||
| +import oauth2 as oauth | ||
| + | ||
| +REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token' | ||
| +ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token' | ||
| +AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize' | ||
| +SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate' | ||
| + | ||
| +consumer_key = "g00szXVinvEk7B9vD1vUQ" | ||
| +consumer_secret = "aL1ghD3Ki4MY1k1aEoLVeUVzoKmq7uRKX16GIiw" | ||
| + | ||
| + | ||
| +if consumer_key is None or consumer_secret is None: | ||
| + print 'You need to edit this script and provide values for the' | ||
| + print 'consumer_key and also consumer_secret.' | ||
| + print '' | ||
| + print 'The values you need come from Twitter - you need to register' | ||
| + print 'as a developer your "application". This is needed only until' | ||
| + print 'Twitter finishes the idea they have of a way to allow open-source' | ||
| + print 'based libraries to have a token that can be used to generate a' | ||
| + print 'one-time use key that will allow the library to make the request' | ||
| + print 'on your behalf.' | ||
| + print '' | ||
| + sys.exit(1) | ||
| + | ||
| +signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() | ||
| +oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret) | ||
| +oauth_client = oauth.Client(oauth_consumer) | ||
| + | ||
| +print 'Requesting temp token from Twitter' | ||
| + | ||
| +resp, content = oauth_client.request(REQUEST_TOKEN_URL, 'GET') | ||
| + | ||
| +if resp['status'] != '200': | ||
| + print 'Invalid respond from Twitter requesting temp token: %s' % resp['status'] | ||
| +else: | ||
| + request_token = dict(parse_qsl(content)) | ||
| + | ||
| + print '' | ||
| + print 'Please visit this Twitter page and retrieve the pincode to be used' | ||
| + print 'in the next step to obtaining an Authentication Token:' | ||
| + print '' | ||
| + print '%s?oauth_token=%s' % (AUTHORIZATION_URL, request_token['oauth_token']) | ||
| + print '' | ||
| + | ||
| + pincode = raw_input('Pincode? ') | ||
| + | ||
| + token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret']) | ||
| + token.set_verifier(pincode) | ||
| + | ||
| + print '' | ||
| + print 'Generating and signing request for an access token' | ||
| + print '' | ||
| + | ||
| + oauth_client = oauth.Client(oauth_consumer, token) | ||
| + resp, content = oauth_client.request(ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % pincode) | ||
| + access_token = dict(parse_qsl(content)) | ||
| + | ||
| + if resp['status'] != '200': | ||
| + print 'The request for a Token did not succeed: %s' % resp['status'] | ||
| + print access_token | ||
| + else: | ||
| + print 'Your Twitter Access Token key: %s' % access_token['oauth_token'] | ||
| + print ' Access Token secret: %s' % access_token['oauth_token_secret'] | ||
| + print '' | ||
| + |
50
python-twitter.spec
| @@ -0,0 +1,50 @@ | ||
| +%{!?python_sitelib: %define python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")} | ||
| + | ||
| +Name: python-twitter | ||
| +Version: 0.7-devel | ||
| +Release: 1%{?dist} | ||
| +Summary: Python Interface for Twitter API | ||
| + | ||
| +Group: Development/Libraries | ||
| +License: Apache License 2.0 | ||
| +URL: http://code.google.com/p/python-twitter/ | ||
| +Source0: http://python-twitter.googlecode.com/files/%{name}-%{version}.tar.gz | ||
| +BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) | ||
| + | ||
| +BuildArch: noarch | ||
| +Requires: python >= 2.4, python-simplejson >= 2.0.7 | ||
| +BuildRequires: python-setuptools | ||
| + | ||
| + | ||
| +%description | ||
| +This library provides a pure python interface for the Twitter API. | ||
| + | ||
| + | ||
| +%prep | ||
| +%setup -q | ||
| + | ||
| + | ||
| +%build | ||
| +%{__python} setup.py build | ||
| + | ||
| + | ||
| +%install | ||
| +rm -rf $RPM_BUILD_ROOT | ||
| +chmod a-x README | ||
| +%{__python} setup.py install --skip-build --root $RPM_BUILD_ROOT | ||
| + | ||
| + | ||
| +%clean | ||
| +rm -rf $RPM_BUILD_ROOT | ||
| + | ||
| + | ||
| +%files | ||
| +%defattr(-,root,root,-) | ||
| +%doc PKG-INFO README CHANGES COPYING LICENSE doc/twitter.html | ||
| +# For noarch packages: sitelib | ||
| +%{python_sitelib}/* | ||
| + | ||
| + | ||
| +%changelog | ||
| +* Sat Mar 22 2008 Steve 'Ashcrow' Milner <[email protected]> - 0.5-1 | ||
| +- Initial package. |
73
setup.py
| @@ -0,0 +1,73 @@ | ||
| +#!/usr/bin/python2.4 | ||
| +# | ||
| +# Copyright 2007 The Python-Twitter Developers | ||
| +# | ||
| +# Licensed under the Apache License, Version 2.0 (the "License"); | ||
| +# you may not use this file except in compliance with the License. | ||
| +# You may obtain a copy of the License at | ||
| +# | ||
| +# http://www.apache.org/licenses/LICENSE-2.0 | ||
| +# | ||
| +# Unless required by applicable law or agreed to in writing, software | ||
| +# distributed under the License is distributed on an "AS IS" BASIS, | ||
| +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| +# See the License for the specific language governing permissions and | ||
| +# limitations under the License. | ||
| + | ||
| +'''The setup and build script for the python-twitter library.''' | ||
| + | ||
__author__ = '[email protected]'
__version__ = '0.8.3'


# The base package metadata to be used by both distutils and setuptools
METADATA = dict(
  name = "python-twitter",
  version = __version__,
  py_modules = ['twitter'],
  author='The Python-Twitter Developers',
  author_email='[email protected]',
  description='A python wrapper around the Twitter API',
  license='Apache License 2.0',
  url='http://code.google.com/p/python-twitter/',
  keywords='twitter api',
)

# Extra package metadata to be used only if setuptools is installed
SETUPTOOLS_METADATA = dict(
  install_requires = ['setuptools', 'simplejson', 'oauth2'],
  include_package_data = True,
  classifiers = [
    'Development Status :: 4 - Beta',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: Apache Software License',
    'Topic :: Software Development :: Libraries :: Python Modules',
    'Topic :: Communications :: Chat',
    'Topic :: Internet',
  ],
  # Enables `python setup.py test` to run the twitter_test suite.
  test_suite = 'twitter_test.suite',
)
| + | ||
| + | ||
def Read(file):
  """Return the full contents of the named file as a string.

  The handle is closed explicitly in a finally block; previously it was
  left open until garbage collection.  (The parameter name shadows the
  `file` builtin but is kept for interface compatibility.)
  """
  f = open(file)
  try:
    return f.read()
  finally:
    f.close()
| + | ||
def BuildLongDescription():
  """Concatenate the README and CHANGES files into one description."""
  sections = [Read('README'), Read('CHANGES')]
  return '\n'.join(sections)
| + | ||
def Main():
  """Assemble the package metadata and invoke setup().

  Prefers setuptools when importable (merging in SETUPTOOLS_METADATA);
  otherwise falls back to plain distutils.
  """
  # The long_description is assembled from the README and CHANGES files.
  METADATA['long_description'] = BuildLongDescription()

  try:
    import setuptools
    METADATA.update(SETUPTOOLS_METADATA)
    setuptools.setup(**METADATA)
  except ImportError:
    import distutils.core
    distutils.core.setup(**METADATA)


if __name__ == '__main__':
  Main()
316
simplejson/__init__.py
| @@ -0,0 +1,316 @@ | ||
| +r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of | ||
| +JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data | ||
| +interchange format. | ||
| + | ||
| +:mod:`simplejson` exposes an API familiar to users of the standard library | ||
| +:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained | ||
| +version of the :mod:`json` library contained in Python 2.6, but maintains | ||
| +compatibility with Python 2.4 and Python 2.5 and (currently) has | ||
| +significant performance advantages, even without using the optional C | ||
| +extension for speedups. | ||
| + | ||
| +Encoding basic Python object hierarchies:: | ||
| + | ||
| + >>> import simplejson as json | ||
| + >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) | ||
| + '["foo", {"bar": ["baz", null, 1.0, 2]}]' | ||
| + >>> print json.dumps("\"foo\bar") | ||
| + "\"foo\bar" | ||
| + >>> print json.dumps(u'\u1234') | ||
| + "\u1234" | ||
| + >>> print json.dumps('\\') | ||
| + "\\" | ||
| + >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True) | ||
| + {"a": 0, "b": 0, "c": 0} | ||
| + >>> from StringIO import StringIO | ||
| + >>> io = StringIO() | ||
| + >>> json.dump(['streaming API'], io) | ||
| + >>> io.getvalue() | ||
| + '["streaming API"]' | ||
| + | ||
| +Compact encoding:: | ||
| + | ||
| + >>> import simplejson as json | ||
| + >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':')) | ||
| + '[1,2,3,{"4":5,"6":7}]' | ||
| + | ||
| +Pretty printing:: | ||
| + | ||
| + >>> import simplejson as json | ||
| + >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4) | ||
| + >>> print '\n'.join([l.rstrip() for l in s.splitlines()]) | ||
| + { | ||
| + "4": 5, | ||
| + "6": 7 | ||
| + } | ||
| + | ||
| +Decoding JSON:: | ||
| + | ||
| + >>> import simplejson as json | ||
| + >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}] | ||
| + >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj | ||
| + True | ||
| + >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar' | ||
| + True | ||
| + >>> from StringIO import StringIO | ||
| + >>> io = StringIO('["streaming API"]') | ||
| + >>> json.load(io)[0] == 'streaming API' | ||
| + True | ||
| + | ||
| +Specializing JSON object decoding:: | ||
| + | ||
| + >>> import simplejson as json | ||
| + >>> def as_complex(dct): | ||
| + ... if '__complex__' in dct: | ||
| + ... return complex(dct['real'], dct['imag']) | ||
| + ... return dct | ||
| + ... | ||
| + >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', | ||
| + ... object_hook=as_complex) | ||
| + (1+2j) | ||
| + >>> import decimal | ||
| + >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1') | ||
| + True | ||
| + | ||
| +Specializing JSON object encoding:: | ||
| + | ||
| + >>> import simplejson as json | ||
| + >>> def encode_complex(obj): | ||
| + ... if isinstance(obj, complex): | ||
| + ... return [obj.real, obj.imag] | ||
 ...     raise TypeError("%r is not JSON serializable" % (obj,))
| + ... | ||
| + >>> json.dumps(2 + 1j, default=encode_complex) | ||
| + '[2.0, 1.0]' | ||
| + >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j) | ||
| + '[2.0, 1.0]' | ||
| + >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j)) | ||
| + '[2.0, 1.0]' | ||
| + | ||
| + | ||
| +Using simplejson.tool from the shell to validate and pretty-print:: | ||
| + | ||
| + $ echo '{"json":"obj"}' | python -msimplejson.tool | ||
| + { | ||
| + "json": "obj" | ||
| + } | ||
| + $ echo '{ 1.2:3.4}' | python -msimplejson.tool | ||
| + Expecting property name: line 1 column 2 (char 2) | ||
| +""" | ||
__version__ = '2.0.7'
# Names exported by ``from simplejson import *``: the four convenience
# functions plus the two classes they build on.
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONEncoder',
]
| + | ||
| +from decoder import JSONDecoder | ||
| +from encoder import JSONEncoder | ||
| + | ||
# Shared encoder reused by dump()/dumps() whenever the caller passes only
# default options; avoids constructing a new JSONEncoder on every call.
# The keyword values here must stay in sync with the fast-path checks in
# dump() and dumps().
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
| + | ||
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream written to ``fp``
    (a ``.write()``-supporting file-like object).

    If ``skipkeys`` is ``True``, ``dict`` keys that are not of a basic type
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    are skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is ``False``, some chunks written to ``fp`` may be
    ``unicode`` instances, subject to normal Python ``str`` to ``unicode``
    coercion rules.  Unless ``fp.write()`` explicitly understands ``unicode``
    (as in ``codecs.getwriter()``) this is likely to cause an error.

    If ``check_circular`` is ``False``, the circular reference check for
    container types is skipped and a circular reference will result in an
    ``OverflowError`` (or worse).

    If ``allow_nan`` is ``False``, serializing out-of-range ``float`` values
    (``nan``, ``inf``, ``-inf``) raises ``ValueError`` in strict compliance
    with the JSON specification, instead of using the JavaScript equivalents
    (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, JSON array elements and object
    members are pretty-printed with that indent level; 0 inserts only
    newlines; ``None`` (the default) is the most compact representation.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple it is
    used instead of the default ``(', ', ': ')``.  ``(',', ':')`` is the most
    compact JSON representation.

    ``encoding`` is the character encoding for ``str`` instances (UTF-8 by
    default).

    ``default(obj)`` is a function that should return a serializable version
    of ``obj`` or raise ``TypeError``; the default simply raises
    ``TypeError``.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), pass it as ``cls``.

    """
    # When every option is at its default value, reuse the shared
    # module-level encoder rather than building a throwaway instance.
    all_defaults = (skipkeys is False and ensure_ascii is True and
                    check_circular is True and allow_nan is True and
                    cls is None and indent is None and separators is None and
                    encoding == 'utf-8' and default is None and not kw)
    if all_defaults:
        encoder = _default_encoder
    else:
        encoder_class = cls
        if encoder_class is None:
            encoder_class = JSONEncoder
        encoder = encoder_class(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, encoding=encoding,
            default=default, **kw)
    # Stream the output chunk by chunk.  ``fp.writelines()`` could be
    # faster on some Pythons, but per-chunk writes are easier to debug.
    for chunk in encoder.iterencode(obj):
        fp.write(chunk)
| + | ||
| + | ||
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    If ``skipkeys`` is ``True``, ``dict`` keys that are not of a basic type
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    are skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is ``False``, the return value is a ``unicode``
    instance subject to normal Python ``str`` to ``unicode`` coercion rules,
    instead of being escaped to an ASCII ``str``.

    If ``check_circular`` is ``False``, the circular reference check for
    container types is skipped and a circular reference will result in an
    ``OverflowError`` (or worse).

    If ``allow_nan`` is ``False``, serializing out-of-range ``float`` values
    (``nan``, ``inf``, ``-inf``) raises ``ValueError`` in strict compliance
    with the JSON specification, instead of using the JavaScript equivalents
    (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, JSON array elements and object
    members are pretty-printed with that indent level; 0 inserts only
    newlines; ``None`` (the default) is the most compact representation.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple it is
    used instead of the default ``(', ', ': ')``.  ``(',', ':')`` is the most
    compact JSON representation.

    ``encoding`` is the character encoding for ``str`` instances (UTF-8 by
    default).

    ``default(obj)`` is a function that should return a serializable version
    of ``obj`` or raise ``TypeError``; the default simply raises
    ``TypeError``.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), pass it as ``cls``.

    """
    # Fast path: an all-defaults call can reuse the shared encoder.
    all_defaults = (skipkeys is False and ensure_ascii is True and
                    check_circular is True and allow_nan is True and
                    cls is None and indent is None and separators is None and
                    encoding == 'utf-8' and default is None and not kw)
    if all_defaults:
        return _default_encoder.encode(obj)
    encoder_class = cls
    if encoder_class is None:
        encoder_class = JSONEncoder
    encoder = encoder_class(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default, **kw)
    return encoder.encode(obj)
| + | ||
| + | ||
# Shared decoder reused by loads() for default-option calls; mirrors
# _default_encoder above.
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
| + | ||
| + | ||
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    If the contents of ``fp`` are encoded with an ASCII-based encoding other
    than utf-8 (e.g. latin-1), an appropriate ``encoding`` name must be
    specified.  Encodings that are not ASCII-based (such as UCS-2) are not
    allowed and should be wrapped with ``codecs.getreader(fp)(encoding)``,
    or simply decoded to a ``unicode`` object and passed to ``loads()``.

    ``object_hook`` is an optional function called with the result of every
    object literal decode (a ``dict``); its return value is used in place of
    the ``dict``.  This can be used to implement custom decoders (e.g.
    JSON-RPC class hinting).

    To use a custom ``JSONDecoder`` subclass, pass it as ``cls``.

    """
    # Slurp the whole document and delegate every option to loads().
    document = fp.read()
    return loads(document, encoding=encoding, cls=cls,
        object_hook=object_hook, parse_float=parse_float,
        parse_int=parse_int, parse_constant=parse_constant, **kw)
| + | ||
| + | ||
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a
    JSON document) to a Python object.

    If ``s`` is a ``str`` encoded with an ASCII-based encoding other than
    utf-8 (e.g. latin-1), an appropriate ``encoding`` name must be
    specified.  Encodings that are not ASCII-based (such as UCS-2) are not
    allowed and should be decoded to ``unicode`` first.

    ``object_hook`` is an optional function called with the result of every
    object literal decode (a ``dict``); its return value is used in place of
    the ``dict``.  This can be used to implement custom decoders (e.g.
    JSON-RPC class hinting).

    ``parse_float``, if given, is called with the string of every JSON
    float to be decoded (default behaves like ``float(num_str)``); use it
    to substitute another datatype or parser (e.g. ``decimal.Decimal``).

    ``parse_int``, if given, is called with the string of every JSON int to
    be decoded (default behaves like ``int(num_str)``); use it to
    substitute another datatype or parser (e.g. ``float``).

    ``parse_constant``, if given, is called with one of the strings
    ``-Infinity``, ``Infinity``, ``NaN``, ``null``, ``true``, ``false``;
    it can be used to raise an exception when invalid JSON numbers are
    encountered.

    To use a custom ``JSONDecoder`` subclass, pass it as ``cls``.

    """
    # Fast path: no customization at all, so the shared decoder applies.
    if (cls is None and encoding is None and object_hook is None and
            parse_int is None and parse_float is None and
            parse_constant is None and not kw):
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied, so that a
    # subclass's own defaults are not clobbered with None.
    optional_hooks = (('object_hook', object_hook),
                      ('parse_float', parse_float),
                      ('parse_int', parse_int),
                      ('parse_constant', parse_constant))
    for name, hook in optional_hooks:
        if hook is not None:
            kw[name] = hook
    return cls(encoding=encoding, **kw).decode(s)
2,265
simplejson/_speedups.c
| @@ -0,0 +1,2265 @@ | ||
/* simplejson/_speedups.c -- C accelerators for simplejson (Python 2.x API):
 * fast ASCII string escaping plus scanner (decoder) and encoder types. */
#include "Python.h"
#include "structmember.h"
/* Py_TYPE was added in Python 2.6; provide it for older interpreters. */
#if PY_VERSION_HEX < 0x02060000 && !defined(Py_TYPE)
#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
#endif
/* Py_ssize_t (PEP 353) arrived in Python 2.5; fall back to int before that. */
#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
typedef int Py_ssize_t;
#define PY_SSIZE_T_MAX INT_MAX
#define PY_SSIZE_T_MIN INT_MIN
#define PyInt_FromSsize_t PyInt_FromLong
#define PyInt_AsSsize_t PyInt_AsLong
#endif
/* Not every platform/CPython build defines Py_IS_FINITE. */
#ifndef Py_IS_FINITE
#define Py_IS_FINITE(X) (!Py_IS_INFINITY(X) && !Py_IS_NAN(X))
#endif

/* Marker to silence unused-parameter warnings on GCC. */
#ifdef __GNUC__
#define UNUSED __attribute__((__unused__))
#else
#define UNUSED
#endif

/* Encoding assumed for str input when the caller supplies none. */
#define DEFAULT_ENCODING "utf-8"

/* Type checks for the scanner/encoder extension types defined below. */
#define PyScanner_Check(op) PyObject_TypeCheck(op, &PyScannerType)
#define PyScanner_CheckExact(op) (Py_TYPE(op) == &PyScannerType)
#define PyEncoder_Check(op) PyObject_TypeCheck(op, &PyEncoderType)
#define PyEncoder_CheckExact(op) (Py_TYPE(op) == &PyEncoderType)
/* Type objects for the two extension types (initialized elsewhere). */
static PyTypeObject PyScannerType;
static PyTypeObject PyEncoderType;

/* Scanner (decoder) state: options and hooks captured from the Python
 * side.  All fields are PyObject pointers so they can be exposed via the
 * member table below. */
typedef struct _PyScannerObject {
    PyObject_HEAD
    PyObject *encoding;        /* encoding name used for str input */
    PyObject *strict;          /* truthy: reject literal control characters */
    PyObject *object_hook;     /* called with each decoded dict, or None */
    PyObject *parse_float;     /* hook for float literals, or None */
    PyObject *parse_int;       /* hook for int literals, or None */
    PyObject *parse_constant;  /* hook for NaN/Infinity-style constants */
} PyScannerObject;

/* Read-only attribute table mirroring the scanner configuration. */
static PyMemberDef scanner_members[] = {
    {"encoding", T_OBJECT, offsetof(PyScannerObject, encoding), READONLY, "encoding"},
    {"strict", T_OBJECT, offsetof(PyScannerObject, strict), READONLY, "strict"},
    {"object_hook", T_OBJECT, offsetof(PyScannerObject, object_hook), READONLY, "object_hook"},
    {"parse_float", T_OBJECT, offsetof(PyScannerObject, parse_float), READONLY, "parse_float"},
    {"parse_int", T_OBJECT, offsetof(PyScannerObject, parse_int), READONLY, "parse_int"},
    {"parse_constant", T_OBJECT, offsetof(PyScannerObject, parse_constant), READONLY, "parse_constant"},
    {NULL}
};

/* Encoder state: separators, hooks and flags used during serialization. */
typedef struct _PyEncoderObject {
    PyObject_HEAD
    PyObject *markers;         /* presumably tracks seen containers for the
                                  circular-reference check -- confirm in
                                  encoder_listencode_* (not in this chunk) */
    PyObject *defaultfn;       /* exposed as "default" below */
    PyObject *encoder;         /* string-escaping callable */
    PyObject *indent;
    PyObject *key_separator;
    PyObject *item_separator;
    PyObject *sort_keys;
    PyObject *skipkeys;
    int fast_encode;           /* NOTE(review): semantics defined where it is
                                  set/used, outside this chunk */
    int allow_nan;
} PyEncoderObject;

/* Read-only attribute table mirroring the encoder configuration. */
static PyMemberDef encoder_members[] = {
    {"markers", T_OBJECT, offsetof(PyEncoderObject, markers), READONLY, "markers"},
    {"default", T_OBJECT, offsetof(PyEncoderObject, defaultfn), READONLY, "default"},
    {"encoder", T_OBJECT, offsetof(PyEncoderObject, encoder), READONLY, "encoder"},
    {"indent", T_OBJECT, offsetof(PyEncoderObject, indent), READONLY, "indent"},
    {"key_separator", T_OBJECT, offsetof(PyEncoderObject, key_separator), READONLY, "key_separator"},
    {"item_separator", T_OBJECT, offsetof(PyEncoderObject, item_separator), READONLY, "item_separator"},
    {"sort_keys", T_OBJECT, offsetof(PyEncoderObject, sort_keys), READONLY, "sort_keys"},
    {"skipkeys", T_OBJECT, offsetof(PyEncoderObject, skipkeys), READONLY, "skipkeys"},
    {NULL}
};
| + | ||
/* ---- Forward declarations ---------------------------------------------- */
static Py_ssize_t
ascii_escape_char(Py_UNICODE c, char *output, Py_ssize_t chars);
static PyObject *
ascii_escape_unicode(PyObject *pystr);
static PyObject *
ascii_escape_str(PyObject *pystr);
static PyObject *
py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr);
void init_speedups(void);  /* Python 2 module-init entry point */
static PyObject *
scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr);
static PyObject *
scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr);
static PyObject *
_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx);
static int
scanner_init(PyObject *self, PyObject *args, PyObject *kwds);
static void
scanner_dealloc(PyObject *self);
static int
encoder_init(PyObject *self, PyObject *args, PyObject *kwds);
static void
encoder_dealloc(PyObject *self);
static int
encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ssize_t indent_level);
static int
encoder_listencode_obj(PyEncoderObject *s, PyObject *rval, PyObject *obj, Py_ssize_t indent_level);
static int
encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ssize_t indent_level);
static PyObject *
_encoded_const(PyObject *const);  /* unnamed const-pointer parameter */
static void
raise_errmsg(char *msg, PyObject *s, Py_ssize_t end);
static PyObject *
encoder_encode_string(PyEncoderObject *s, PyObject *obj);
static int
_convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr);
static PyObject *
_convertPyInt_FromSsize_t(Py_ssize_t *size_ptr);
static PyObject *
encoder_encode_float(PyEncoderObject *s, PyObject *obj);

/* Printable ASCII that may appear verbatim inside a JSON string
 * (excludes backslash and double quote). */
#define S_CHAR(c) (c >= ' ' && c <= '~' && c != '\\' && c != '"')
/* The four JSON whitespace characters. */
#define IS_WHITESPACE(c) (((c) == ' ') || ((c) == '\t') || ((c) == '\n') || ((c) == '\r'))

/* Worst-case growth when escaping a single character: "\uXXXX" is 6
 * bytes; on wide-unicode builds a non-BMP character needs a surrogate
 * pair, i.e. twice that. */
#define MIN_EXPANSION 6
#ifdef Py_UNICODE_WIDE
#define MAX_EXPANSION (2 * MIN_EXPANSION)
#else
#define MAX_EXPANSION MIN_EXPANSION
#endif
| + | ||
| +static int | ||
| +_convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr) | ||
| +{ | ||
| + /* PyObject to Py_ssize_t converter */ | ||
| + *size_ptr = PyInt_AsSsize_t(o); | ||
| + if (*size_ptr == -1 && PyErr_Occurred()); | ||
| + return 1; | ||
| + return 0; | ||
| +} | ||
| + | ||
| +static PyObject * | ||
| +_convertPyInt_FromSsize_t(Py_ssize_t *size_ptr) | ||
| +{ | ||
| + /* Py_ssize_t to PyObject converter */ | ||
| + return PyInt_FromSsize_t(*size_ptr); | ||
| +} | ||
| + | ||
static Py_ssize_t
ascii_escape_char(Py_UNICODE c, char *output, Py_ssize_t chars)
{
    /* Escape unicode code point c to ASCII escape sequences
    in char *output. output must have at least 12 bytes unused to
    accommodate an escaped surrogate pair "\uXXXX\uXXXX".
    Returns the updated write position (chars plus bytes emitted). */
    output[chars++] = '\\';
    switch (c) {
        /* Characters with a dedicated two-character escape. */
        case '\\': output[chars++] = (char)c; break;
        case '"': output[chars++] = (char)c; break;
        case '\b': output[chars++] = 'b'; break;
        case '\f': output[chars++] = 'f'; break;
        case '\n': output[chars++] = 'n'; break;
        case '\r': output[chars++] = 'r'; break;
        case '\t': output[chars++] = 't'; break;
        default:
#ifdef Py_UNICODE_WIDE
            if (c >= 0x10000) {
                /* UTF-16 surrogate pair: emit the high surrogate here,
                 * then fall through to emit the low surrogate below. */
                Py_UNICODE v = c - 0x10000;
                c = 0xd800 | ((v >> 10) & 0x3ff);
                output[chars++] = 'u';
                output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf];
                output[chars++] = "0123456789abcdef"[(c >>  8) & 0xf];
                output[chars++] = "0123456789abcdef"[(c >>  4) & 0xf];
                output[chars++] = "0123456789abcdef"[(c      ) & 0xf];
                c = 0xdc00 | (v & 0x3ff);
                output[chars++] = '\\';
            }
#endif
            /* Generic \uXXXX escape, one hex nibble at a time. */
            output[chars++] = 'u';
            output[chars++] = "0123456789abcdef"[(c >> 12) & 0xf];
            output[chars++] = "0123456789abcdef"[(c >>  8) & 0xf];
            output[chars++] = "0123456789abcdef"[(c >>  4) & 0xf];
            output[chars++] = "0123456789abcdef"[(c      ) & 0xf];
    }
    return chars;
}
| + | ||
static PyObject *
ascii_escape_unicode(PyObject *pystr)
{
    /* Take a PyUnicode pystr and return a new ASCII-only escaped PyString,
     * including the surrounding double quotes. */
    Py_ssize_t i;
    Py_ssize_t input_chars;
    Py_ssize_t output_size;      /* current allocation of rval */
    Py_ssize_t max_output_size;  /* hard upper bound on any resize */
    Py_ssize_t chars;            /* write position in output */
    PyObject *rval;
    char *output;
    Py_UNICODE *input_unicode;

    input_chars = PyUnicode_GET_SIZE(pystr);
    input_unicode = PyUnicode_AS_UNICODE(pystr);

    /* One char input can be up to 6 chars output, estimate 4 of these;
     * the +2 accounts for the enclosing quotes. */
    output_size = 2 + (MIN_EXPANSION * 4) + input_chars;
    max_output_size = 2 + (input_chars * MAX_EXPANSION);
    rval = PyString_FromStringAndSize(NULL, output_size);
    if (rval == NULL) {
        return NULL;
    }
    output = PyString_AS_STRING(rval);
    chars = 0;
    output[chars++] = '"';
    for (i = 0; i < input_chars; i++) {
        Py_UNICODE c = input_unicode[i];
        if (S_CHAR(c)) {
            /* Safe printable ASCII: copy through unchanged. */
            output[chars++] = (char)c;
        }
        else {
            chars = ascii_escape_char(c, output, chars);
        }
        /* Keep room for one worst-case escape plus the closing quote. */
        if (output_size - chars < (1 + MAX_EXPANSION)) {
            /* There's more than four, so let's resize by a lot */
            Py_ssize_t new_output_size = output_size * 2;
            /* This is an upper bound */
            if (new_output_size > max_output_size) {
                new_output_size = max_output_size;
            }
            /* Make sure that the output size changed before resizing */
            if (new_output_size != output_size) {
                output_size = new_output_size;
                if (_PyString_Resize(&rval, output_size) == -1) {
                    return NULL;
                }
                /* The buffer may have moved; refresh the raw pointer. */
                output = PyString_AS_STRING(rval);
            }
        }
    }
    output[chars++] = '"';
    /* Shrink the over-allocated result down to its real length. */
    if (_PyString_Resize(&rval, chars) == -1) {
        return NULL;
    }
    return rval;
}
| + | ||
static PyObject *
ascii_escape_str(PyObject *pystr)
{
    /* Take a PyString pystr and return a new ASCII-only escaped PyString,
     * including the surrounding double quotes.  Non-ASCII bytes cause the
     * whole input to be decoded as UTF-8 and handled by
     * ascii_escape_unicode() instead. */
    Py_ssize_t i;
    Py_ssize_t input_chars;
    Py_ssize_t output_size;
    Py_ssize_t chars;
    PyObject *rval;
    char *output;
    char *input_str;

    input_chars = PyString_GET_SIZE(pystr);
    input_str = PyString_AS_STRING(pystr);

    /* Fast path for a string that's already ASCII */
    for (i = 0; i < input_chars; i++) {
        Py_UNICODE c = (Py_UNICODE)(unsigned char)input_str[i];
        if (!S_CHAR(c)) {
            /* If we have to escape something, scan the string for unicode */
            Py_ssize_t j;
            for (j = i; j < input_chars; j++) {
                c = (Py_UNICODE)(unsigned char)input_str[j];
                if (c > 0x7f) {
                    /* We hit a non-ASCII character, bail to unicode mode */
                    PyObject *uni;
                    uni = PyUnicode_DecodeUTF8(input_str, input_chars, "strict");
                    if (uni == NULL) {
                        return NULL;
                    }
                    rval = ascii_escape_unicode(uni);
                    Py_DECREF(uni);
                    return rval;
                }
            }
            break;
        }
    }

    if (i == input_chars) {
        /* Input is already ASCII */
        output_size = 2 + input_chars;
    }
    else {
        /* One char input can be up to 6 chars output, estimate 4 of these */
        output_size = 2 + (MIN_EXPANSION * 4) + input_chars;
    }
    rval = PyString_FromStringAndSize(NULL, output_size);
    if (rval == NULL) {
        return NULL;
    }
    output = PyString_AS_STRING(rval);
    output[0] = '"';

    /* We know that everything up to i is ASCII already */
    chars = i + 1;
    memcpy(&output[1], input_str, i);

    for (; i < input_chars; i++) {
        Py_UNICODE c = (Py_UNICODE)(unsigned char)input_str[i];
        if (S_CHAR(c)) {
            output[chars++] = (char)c;
        }
        else {
            chars = ascii_escape_char(c, output, chars);
        }
        /* An ASCII char can't possibly expand to a surrogate! */
        if (output_size - chars < (1 + MIN_EXPANSION)) {
            /* There's more than four, so let's resize by a lot */
            output_size *= 2;
            /* Cap at the true worst case for an all-ASCII input. */
            if (output_size > 2 + (input_chars * MIN_EXPANSION)) {
                output_size = 2 + (input_chars * MIN_EXPANSION);
            }
            if (_PyString_Resize(&rval, output_size) == -1) {
                return NULL;
            }
            /* The buffer may have moved; refresh the raw pointer. */
            output = PyString_AS_STRING(rval);
        }
    }
    output[chars++] = '"';
    /* Trim the over-allocation to the actual length. */
    if (_PyString_Resize(&rval, chars) == -1) {
        return NULL;
    }
    return rval;
}
| + | ||
static void
raise_errmsg(char *msg, PyObject *s, Py_ssize_t end)
{
    /* Use the Python function simplejson.decoder.errmsg to raise a nice
    looking ValueError exception.  On any internal failure the pending
    exception (import/attribute/call error) is left set instead. */
    /* errmsg_fn is looked up once and cached for the life of the process
     * (the static reference is intentionally never released). */
    static PyObject *errmsg_fn = NULL;
    PyObject *pymsg;
    if (errmsg_fn == NULL) {
        PyObject *decoder = PyImport_ImportModule("simplejson.decoder");
        if (decoder == NULL)
            return;
        errmsg_fn = PyObject_GetAttrString(decoder, "errmsg");
        Py_DECREF(decoder);
        if (errmsg_fn == NULL)
            return;
    }
    /* "O&" converts end to a Python int via _convertPyInt_FromSsize_t. */
    pymsg = PyObject_CallFunction(errmsg_fn, "(zOO&)", msg, s, _convertPyInt_FromSsize_t, &end);
    if (pymsg) {
        PyErr_SetObject(PyExc_ValueError, pymsg);
        Py_DECREF(pymsg);
    }
}
| + | ||
static PyObject *
join_list_unicode(PyObject *lst)
{
    /* return u''.join(lst) */
    /* The bound u''.join method is cached in a static on first use and
     * kept for the life of the process. */
    static PyObject *joinfn = NULL;
    if (joinfn == NULL) {
        PyObject *ustr = PyUnicode_FromUnicode(NULL, 0);
        if (ustr == NULL)
            return NULL;

        joinfn = PyObject_GetAttrString(ustr, "join");
        Py_DECREF(ustr);
        if (joinfn == NULL)
            return NULL;
    }
    return PyObject_CallFunctionObjArgs(joinfn, lst, NULL);
}
| + | ||
static PyObject *
join_list_string(PyObject *lst)
{
    /* return ''.join(lst) -- byte-string twin of join_list_unicode(). */
    /* The bound ''.join method is cached in a static on first use. */
    static PyObject *joinfn = NULL;
    if (joinfn == NULL) {
        PyObject *ustr = PyString_FromStringAndSize(NULL, 0);
        if (ustr == NULL)
            return NULL;

        joinfn = PyObject_GetAttrString(ustr, "join");
        Py_DECREF(ustr);
        if (joinfn == NULL)
            return NULL;
    }
    return PyObject_CallFunctionObjArgs(joinfn, lst, NULL);
}
| + | ||
static PyObject *
_build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) {
    /* return (rval, idx) tuple, stealing reference to rval */
    PyObject *tpl;
    PyObject *pyidx;
    /*
    steal a reference to rval, returns (rval, idx)
    */
    if (rval == NULL) {
        /* Caller's computation already failed; propagate NULL. */
        return NULL;
    }
    pyidx = PyInt_FromSsize_t(idx);
    if (pyidx == NULL) {
        /* We own rval (stolen), so release it on every error path. */
        Py_DECREF(rval);
        return NULL;
    }
    tpl = PyTuple_New(2);
    if (tpl == NULL) {
        Py_DECREF(pyidx);
        Py_DECREF(rval);
        return NULL;
    }
    /* PyTuple_SET_ITEM steals both references into the tuple. */
    PyTuple_SET_ITEM(tpl, 0, rval);
    PyTuple_SET_ITEM(tpl, 1, pyidx);
    return tpl;
}
| + | ||
static PyObject *
scanstring_str(PyObject *pystr, Py_ssize_t end, char *encoding, int strict, Py_ssize_t *next_end_ptr)
{
    /* Read the JSON string from PyString pystr.
    end is the index of the first character after the quote.
    encoding is the encoding of pystr (must be an ASCII superset)
    if strict is zero then literal control characters are allowed
    *next_end_ptr is a return-by-reference index of the character
    after the end quote

    Return value is a new PyString (if ASCII-only) or PyUnicode

    Strategy: collect literal runs and decoded escapes as separate
    chunks in a list, then join them once at the end. */
    PyObject *rval;
    Py_ssize_t len = PyString_GET_SIZE(pystr);
    Py_ssize_t begin = end - 1;   /* index of the opening quote */
    Py_ssize_t next = begin;
    int has_unicode = 0;          /* set once any non-ASCII content is seen */
    char *buf = PyString_AS_STRING(pystr);
    PyObject *chunks = PyList_New(0);
    if (chunks == NULL) {
        goto bail;
    }
    if (end < 0 || len <= end) {
        PyErr_SetString(PyExc_ValueError, "end is out of bounds");
        goto bail;
    }
    while (1) {
        /* Find the end of the string or the next escape */
        Py_UNICODE c = 0;
        PyObject *chunk = NULL;
        for (next = end; next < len; next++) {
            c = (unsigned char)buf[next];
            if (c == '"' || c == '\\') {
                break;
            }
            else if (strict && c <= 0x1f) {
                raise_errmsg("Invalid control character at", pystr, next);
                goto bail;
            }
            else if (c > 0x7f) {
                has_unicode = 1;
            }
        }
        if (!(c == '"' || c == '\\')) {
            /* Ran off the end of the buffer without a closing quote. */
            raise_errmsg("Unterminated string starting at", pystr, begin);
            goto bail;
        }
        /* Pick up this chunk if it's not zero length */
        if (next != end) {
            PyObject *strchunk = PyString_FromStringAndSize(&buf[end], next - end);
            if (strchunk == NULL) {
                goto bail;
            }
            if (has_unicode) {
                /* Promote the raw bytes to unicode with the caller's
                 * encoding so all chunks join to a unicode result. */
                chunk = PyUnicode_FromEncodedObject(strchunk, encoding, NULL);
                Py_DECREF(strchunk);
                if (chunk == NULL) {
                    goto bail;
                }
            }
            else {
                chunk = strchunk;
            }
            if (PyList_Append(chunks, chunk)) {
                Py_DECREF(chunk);
                goto bail;
            }
            Py_DECREF(chunk);
        }
        next++;
        if (c == '"') {
            /* Closing quote: string complete. */
            end = next;
            break;
        }
        if (next == len) {
            /* Backslash was the last byte in the buffer. */
            raise_errmsg("Unterminated string starting at", pystr, begin);
            goto bail;
        }
        c = buf[next];
        if (c != 'u') {
            /* Non-unicode backslash escapes */
            end = next + 1;
            switch (c) {
                case '"': break;
                case '\\': break;
                case '/': break;
                case 'b': c = '\b'; break;
                case 'f': c = '\f'; break;
                case 'n': c = '\n'; break;
                case 'r': c = '\r'; break;
                case 't': c = '\t'; break;
                default: c = 0;  /* sentinel: invalid escape */
            }
            if (c == 0) {
                raise_errmsg("Invalid \\escape", pystr, end - 2);
                goto bail;
            }
        }
        else {
            /* \uXXXX escape */
            c = 0;
            next++;
            end = next + 4;
            if (end >= len) {
                raise_errmsg("Invalid \\uXXXX escape", pystr, next - 1);
                goto bail;
            }
            /* Decode 4 hex digits */
            for (; next < end; next++) {
                Py_UNICODE digit = buf[next];
                c <<= 4;
                switch (digit) {
                    case '0': case '1': case '2': case '3': case '4':
                    case '5': case '6': case '7': case '8': case '9':
                        c |= (digit - '0'); break;
                    case 'a': case 'b': case 'c': case 'd': case 'e':
                    case 'f':
                        c |= (digit - 'a' + 10); break;
                    case 'A': case 'B': case 'C': case 'D': case 'E':
                    case 'F':
                        c |= (digit - 'A' + 10); break;
                    default:
                        raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5);
                        goto bail;
                }
            }
#ifdef Py_UNICODE_WIDE
            /* Surrogate pair */
            if ((c & 0xfc00) == 0xd800) {
                /* High surrogate: a second "\uXXXX" low surrogate must
                 * immediately follow. */
                Py_UNICODE c2 = 0;
                if (end + 6 >= len) {
                    raise_errmsg("Unpaired high surrogate", pystr, end - 5);
                    goto bail;
                }
                if (buf[next++] != '\\' || buf[next++] != 'u') {
                    raise_errmsg("Unpaired high surrogate", pystr, end - 5);
                    goto bail;
                }
                end += 6;
                /* Decode 4 hex digits */
                for (; next < end; next++) {
                    c2 <<= 4;
                    /* NOTE(review): declaration after a statement is
                     * C99/GNU-only; pre-C99 compilers would reject it. */
                    Py_UNICODE digit = buf[next];
                    switch (digit) {
                        case '0': case '1': case '2': case '3': case '4':
                        case '5': case '6': case '7': case '8': case '9':
                            c2 |= (digit - '0'); break;
                        case 'a': case 'b': case 'c': case 'd': case 'e':
                        case 'f':
                            c2 |= (digit - 'a' + 10); break;
                        case 'A': case 'B': case 'C': case 'D': case 'E':
                        case 'F':
                            c2 |= (digit - 'A' + 10); break;
                        default:
                            raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5);
                            goto bail;
                    }
                }
                if ((c2 & 0xfc00) != 0xdc00) {
                    raise_errmsg("Unpaired high surrogate", pystr, end - 5);
                    goto bail;
                }
                /* Combine the pair into one code point. */
                c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00));
            }
            else if ((c & 0xfc00) == 0xdc00) {
                raise_errmsg("Unpaired low surrogate", pystr, end - 5);
                goto bail;
            }
#endif
        }
        if (c > 0x7f) {
            has_unicode = 1;
        }
        /* Append the single decoded character as its own chunk. */
        if (has_unicode) {
            chunk = PyUnicode_FromUnicode(&c, 1);
            if (chunk == NULL) {
                goto bail;
            }
        }
        else {
            char c_char = Py_CHARMASK(c);
            chunk = PyString_FromStringAndSize(&c_char, 1);
            if (chunk == NULL) {
                goto bail;
            }
        }
        if (PyList_Append(chunks, chunk)) {
            Py_DECREF(chunk);
            goto bail;
        }
        Py_DECREF(chunk);
    }

    /* NOTE(review): joined with ''.join even when unicode chunks are
     * present -- str.join promotes to unicode in that case. */
    rval = join_list_string(chunks);
    if (rval == NULL) {
        goto bail;
    }
    Py_CLEAR(chunks);
    *next_end_ptr = end;
    return rval;
bail:
    *next_end_ptr = -1;
    Py_XDECREF(chunks);
    return NULL;
}
| + | ||
| + | ||
static PyObject *
scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr)
{
    /* Read the JSON string from PyUnicode pystr.
    end is the index of the first character after the opening quote.
    if strict is zero then literal control characters (< 0x20) are allowed
    *next_end_ptr is a return-by-reference index of the character
        after the end quote

    Return value is a new PyUnicode built by joining the accumulated
    chunks (literal runs and decoded escapes).
    */
    PyObject *rval;
    Py_ssize_t len = PyUnicode_GET_SIZE(pystr);
    Py_ssize_t begin = end - 1;          /* index of the opening quote, for error messages */
    Py_ssize_t next = begin;
    const Py_UNICODE *buf = PyUnicode_AS_UNICODE(pystr);
    PyObject *chunks = PyList_New(0);    /* list of PyUnicode pieces, joined at the end */
    if (chunks == NULL) {
        goto bail;
    }
    if (end < 0 || len <= end) {
        PyErr_SetString(PyExc_ValueError, "end is out of bounds");
        goto bail;
    }
    while (1) {
        /* Find the end of the string or the next escape */
        Py_UNICODE c = 0;
        PyObject *chunk = NULL;
        for (next = end; next < len; next++) {
            c = buf[next];
            if (c == '"' || c == '\\') {
                break;
            }
            else if (strict && c <= 0x1f) {
                raise_errmsg("Invalid control character at", pystr, next);
                goto bail;
            }
        }
        if (!(c == '"' || c == '\\')) {
            /* Hit end of buffer without a close quote or escape */
            raise_errmsg("Unterminated string starting at", pystr, begin);
            goto bail;
        }
        /* Pick up this chunk if it's not zero length */
        if (next != end) {
            chunk = PyUnicode_FromUnicode(&buf[end], next - end);
            if (chunk == NULL) {
                goto bail;
            }
            if (PyList_Append(chunks, chunk)) {
                Py_DECREF(chunk);
                goto bail;
            }
            Py_DECREF(chunk);
        }
        next++;
        if (c == '"') {
            /* Closing quote: done */
            end = next;
            break;
        }
        if (next == len) {
            /* Backslash was the last character */
            raise_errmsg("Unterminated string starting at", pystr, begin);
            goto bail;
        }
        c = buf[next];
        if (c != 'u') {
            /* Non-unicode backslash escapes */
            end = next + 1;
            switch (c) {
                case '"': break;
                case '\\': break;
                case '/': break;
                case 'b': c = '\b'; break;
                case 'f': c = '\f'; break;
                case 'n': c = '\n'; break;
                case 'r': c = '\r'; break;
                case 't': c = '\t'; break;
                default: c = 0;   /* sentinel: unrecognized escape */
            }
            if (c == 0) {
                raise_errmsg("Invalid \\escape", pystr, end - 2);
                goto bail;
            }
        }
        else {
            /* \uXXXX escape: decode 4 hex digits into c */
            c = 0;
            next++;
            end = next + 4;
            /* NOTE(review): rejects end == len too, where the 4 hex digits fill
               the buffer exactly; that case would be an unterminated string
               anyway, so only the error message differs — confirm intended. */
            if (end >= len) {
                raise_errmsg("Invalid \\uXXXX escape", pystr, next - 1);
                goto bail;
            }
            /* Decode 4 hex digits */
            for (; next < end; next++) {
                Py_UNICODE digit = buf[next];
                c <<= 4;
                switch (digit) {
                    case '0': case '1': case '2': case '3': case '4':
                    case '5': case '6': case '7': case '8': case '9':
                        c |= (digit - '0'); break;
                    case 'a': case 'b': case 'c': case 'd': case 'e':
                    case 'f':
                        c |= (digit - 'a' + 10); break;
                    case 'A': case 'B': case 'C': case 'D': case 'E':
                    case 'F':
                        c |= (digit - 'A' + 10); break;
                    default:
                        raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5);
                        goto bail;
                }
            }
#ifdef Py_UNICODE_WIDE
            /* Surrogate pair: on wide (UCS-4) builds, combine a high
               surrogate escape with the following \uXXXX low surrogate
               into a single code point */
            if ((c & 0xfc00) == 0xd800) {
                Py_UNICODE c2 = 0;
                /* NOTE(review): guard looks off by one (a pair ending exactly
                   at the buffer end is reported as unpaired rather than
                   unterminated) — harmless, but confirm against upstream. */
                if (end + 6 >= len) {
                    raise_errmsg("Unpaired high surrogate", pystr, end - 5);
                    goto bail;
                }
                if (buf[next++] != '\\' || buf[next++] != 'u') {
                    raise_errmsg("Unpaired high surrogate", pystr, end - 5);
                    goto bail;
                }
                end += 6;
                /* Decode 4 hex digits */
                for (; next < end; next++) {
                    c2 <<= 4;
                    Py_UNICODE digit = buf[next];
                    switch (digit) {
                        case '0': case '1': case '2': case '3': case '4':
                        case '5': case '6': case '7': case '8': case '9':
                            c2 |= (digit - '0'); break;
                        case 'a': case 'b': case 'c': case 'd': case 'e':
                        case 'f':
                            c2 |= (digit - 'a' + 10); break;
                        case 'A': case 'B': case 'C': case 'D': case 'E':
                        case 'F':
                            c2 |= (digit - 'A' + 10); break;
                        default:
                            raise_errmsg("Invalid \\uXXXX escape", pystr, end - 5);
                            goto bail;
                    }
                }
                if ((c2 & 0xfc00) != 0xdc00) {
                    raise_errmsg("Unpaired high surrogate", pystr, end - 5);
                    goto bail;
                }
                /* Combine surrogates into the supplementary-plane code point */
                c = 0x10000 + (((c - 0xd800) << 10) | (c2 - 0xdc00));
            }
            else if ((c & 0xfc00) == 0xdc00) {
                raise_errmsg("Unpaired low surrogate", pystr, end - 5);
                goto bail;
            }
#endif
        }
        /* Append the single decoded character as its own chunk */
        chunk = PyUnicode_FromUnicode(&c, 1);
        if (chunk == NULL) {
            goto bail;
        }
        if (PyList_Append(chunks, chunk)) {
            Py_DECREF(chunk);
            goto bail;
        }
        Py_DECREF(chunk);
    }

    rval = join_list_unicode(chunks);
    if (rval == NULL) {
        goto bail;
    }
    Py_DECREF(chunks);
    *next_end_ptr = end;
    return rval;
bail:
    /* Error path: signal failure via *next_end_ptr and drop the chunk list */
    *next_end_ptr = -1;
    Py_XDECREF(chunks);
    return NULL;
}
| + | ||
/* Python-level docstring for the module's "scanstring" function (py_scanstring). */
PyDoc_STRVAR(pydoc_scanstring,
    "scanstring(basestring, end, encoding, strict=True) -> (str, end)\n"
    "\n"
    "Scan the string s for a JSON string. End is the index of the\n"
    "character in s after the quote that started the JSON string.\n"
    "Unescapes all valid JSON string escape sequences and raises ValueError\n"
    "on attempt to decode an invalid string. If strict is False then literal\n"
    "control characters are allowed in the string.\n"
    "\n"
    "Returns a tuple of the decoded string and the index of the character in s\n"
    "after the end quote."
);
| + | ||
| +static PyObject * | ||
| +py_scanstring(PyObject* self UNUSED, PyObject *args) | ||
| +{ | ||
| + PyObject *pystr; | ||
| + PyObject *rval; | ||
| + Py_ssize_t end; | ||
| + Py_ssize_t next_end = -1; | ||
| + char *encoding = NULL; | ||
| + int strict = 1; | ||
| + if (!PyArg_ParseTuple(args, "OO&|zi:scanstring", &pystr, _convertPyInt_AsSsize_t, &end, &encoding, &strict)) { | ||
| + return NULL; | ||
| + } | ||
| + if (encoding == NULL) { | ||
| + encoding = DEFAULT_ENCODING; | ||
| + } | ||
| + if (PyString_Check(pystr)) { | ||
| + rval = scanstring_str(pystr, end, encoding, strict, &next_end); | ||
| + } | ||
| + else if (PyUnicode_Check(pystr)) { | ||
| + rval = scanstring_unicode(pystr, end, strict, &next_end); | ||
| + } | ||
| + else { | ||
| + PyErr_Format(PyExc_TypeError, | ||
| + "first argument must be a string, not %.80s", | ||
| + Py_TYPE(pystr)->tp_name); | ||
| + return NULL; | ||
| + } | ||
| + return _build_rval_index_tuple(rval, next_end); | ||
| +} | ||
| + | ||
/* Python-level docstring for the module's "encode_basestring_ascii" function. */
PyDoc_STRVAR(pydoc_encode_basestring_ascii,
    "encode_basestring_ascii(basestring) -> str\n"
    "\n"
    "Return an ASCII-only JSON representation of a Python string"
);
| + | ||
| +static PyObject * | ||
| +py_encode_basestring_ascii(PyObject* self UNUSED, PyObject *pystr) | ||
| +{ | ||
| + /* Return an ASCII-only JSON representation of a Python string */ | ||
| + /* METH_O */ | ||
| + if (PyString_Check(pystr)) { | ||
| + return ascii_escape_str(pystr); | ||
| + } | ||
| + else if (PyUnicode_Check(pystr)) { | ||
| + return ascii_escape_unicode(pystr); | ||
| + } | ||
| + else { | ||
| + PyErr_Format(PyExc_TypeError, | ||
| + "first argument must be a string, not %.80s", | ||
| + Py_TYPE(pystr)->tp_name); | ||
| + return NULL; | ||
| + } | ||
| +} | ||
| + | ||
| +static void | ||
| +scanner_dealloc(PyObject *self) | ||
| +{ | ||
| + /* Deallocate scanner object */ | ||
| + PyScannerObject *s; | ||
| + assert(PyScanner_Check(self)); | ||
| + s = (PyScannerObject *)self; | ||
| + Py_CLEAR(s->encoding); | ||
| + Py_CLEAR(s->strict); | ||
| + Py_CLEAR(s->object_hook); | ||
| + Py_CLEAR(s->parse_float); | ||
| + Py_CLEAR(s->parse_int); | ||
| + Py_CLEAR(s->parse_constant); | ||
| + self->ob_type->tp_free(self); | ||
| +} | ||
| + | ||
static PyObject *
_parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
    /* Read a JSON object from PyString pystr.
    idx is the index of the first character after the opening curly brace.
    *next_idx_ptr is a return-by-reference index to the first character after
        the closing curly brace.

    Returns a new PyObject (usually a dict, but object_hook can change that)
    */
    char *str = PyString_AS_STRING(pystr);
    Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1;   /* index of last char */
    PyObject *rval = PyDict_New();
    PyObject *key = NULL;
    PyObject *val = NULL;
    /* NOTE(review): assumes s->encoding is always a PyString here — set up
       by the scanner constructor; confirm it can never be unicode. */
    char *encoding = PyString_AS_STRING(s->encoding);
    int strict = PyObject_IsTrue(s->strict);
    Py_ssize_t next_idx;
    if (rval == NULL)
        return NULL;

    /* skip whitespace after { */
    while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

    /* only loop if the object is non-empty */
    if (idx <= end_idx && str[idx] != '}') {
        while (idx <= end_idx) {
            /* read key: must be a quoted string */
            if (str[idx] != '"') {
                raise_errmsg("Expecting property name", pystr, idx);
                goto bail;
            }
            key = scanstring_str(pystr, idx + 1, encoding, strict, &next_idx);
            if (key == NULL)
                goto bail;
            idx = next_idx;

            /* skip whitespace between key and : delimiter, read :, skip whitespace */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
            if (idx > end_idx || str[idx] != ':') {
                raise_errmsg("Expecting : delimiter", pystr, idx);
                goto bail;
            }
            idx++;
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

            /* read any JSON data type as the value */
            val = scan_once_str(s, pystr, idx, &next_idx);
            if (val == NULL)
                goto bail;

            if (PyDict_SetItem(rval, key, val) == -1)
                goto bail;

            /* SetItem took its own references; drop ours */
            Py_CLEAR(key);
            Py_CLEAR(val);
            idx = next_idx;

            /* skip whitespace before } or , */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

            /* bail if the object is closed or we didn't get the , delimiter */
            if (idx > end_idx) break;
            if (str[idx] == '}') {
                break;
            }
            else if (str[idx] != ',') {
                raise_errmsg("Expecting , delimiter", pystr, idx);
                goto bail;
            }
            idx++;

            /* skip whitespace after , delimiter */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
        }
    }
    /* verify that idx < end_idx, str[idx] should be '}' */
    if (idx > end_idx || str[idx] != '}') {
        raise_errmsg("Expecting object", pystr, end_idx);
        goto bail;
    }
    /* if object_hook is not None: rval = object_hook(rval) */
    if (s->object_hook != Py_None) {
        val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL);
        if (val == NULL)
            goto bail;
        Py_DECREF(rval);
        rval = val;
        val = NULL;
    }
    *next_idx_ptr = idx + 1;
    return rval;
bail:
    /* Error path: release any half-built key/value and the result dict */
    Py_XDECREF(key);
    Py_XDECREF(val);
    Py_DECREF(rval);
    return NULL;
}
| + | ||
static PyObject *
_parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
    /* Read a JSON object from PyUnicode pystr.
    idx is the index of the first character after the opening curly brace.
    *next_idx_ptr is a return-by-reference index to the first character after
        the closing curly brace.

    Returns a new PyObject (usually a dict, but object_hook can change that)
    */
    Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
    Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1;   /* index of last char */
    PyObject *val = NULL;
    PyObject *rval = PyDict_New();
    PyObject *key = NULL;
    int strict = PyObject_IsTrue(s->strict);
    Py_ssize_t next_idx;
    if (rval == NULL)
        return NULL;

    /* skip whitespace after { */
    while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

    /* only loop if the object is non-empty */
    if (idx <= end_idx && str[idx] != '}') {
        while (idx <= end_idx) {
            /* read key: must be a quoted string */
            if (str[idx] != '"') {
                raise_errmsg("Expecting property name", pystr, idx);
                goto bail;
            }
            key = scanstring_unicode(pystr, idx + 1, strict, &next_idx);
            if (key == NULL)
                goto bail;
            idx = next_idx;

            /* skip whitespace between key and : delimiter, read :, skip whitespace */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
            if (idx > end_idx || str[idx] != ':') {
                raise_errmsg("Expecting : delimiter", pystr, idx);
                goto bail;
            }
            idx++;
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

            /* read any JSON term as the value */
            val = scan_once_unicode(s, pystr, idx, &next_idx);
            if (val == NULL)
                goto bail;

            if (PyDict_SetItem(rval, key, val) == -1)
                goto bail;

            /* SetItem took its own references; drop ours */
            Py_CLEAR(key);
            Py_CLEAR(val);
            idx = next_idx;

            /* skip whitespace before } or , */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

            /* bail if the object is closed or we didn't get the , delimiter */
            if (idx > end_idx) break;
            if (str[idx] == '}') {
                break;
            }
            else if (str[idx] != ',') {
                raise_errmsg("Expecting , delimiter", pystr, idx);
                goto bail;
            }
            idx++;

            /* skip whitespace after , delimiter */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
        }
    }

    /* verify that idx < end_idx, str[idx] should be '}' */
    if (idx > end_idx || str[idx] != '}') {
        raise_errmsg("Expecting object", pystr, end_idx);
        goto bail;
    }

    /* if object_hook is not None: rval = object_hook(rval) */
    if (s->object_hook != Py_None) {
        val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL);
        if (val == NULL)
            goto bail;
        Py_DECREF(rval);
        rval = val;
        val = NULL;
    }
    *next_idx_ptr = idx + 1;
    return rval;
bail:
    /* Error path: release any half-built key/value and the result dict */
    Py_XDECREF(key);
    Py_XDECREF(val);
    Py_DECREF(rval);
    return NULL;
}
| + | ||
static PyObject *
_parse_array_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
    /* Read a JSON array from PyString pystr.
    idx is the index of the first character after the opening bracket.
    *next_idx_ptr is a return-by-reference index to the first character after
        the closing bracket.

    Returns a new PyList
    */
    char *str = PyString_AS_STRING(pystr);
    Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1;   /* index of last char */
    PyObject *val = NULL;
    PyObject *rval = PyList_New(0);
    Py_ssize_t next_idx;
    if (rval == NULL)
        return NULL;

    /* skip whitespace after [ */
    while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

    /* only loop if the array is non-empty */
    if (idx <= end_idx && str[idx] != ']') {
        while (idx <= end_idx) {

            /* read any JSON term and de-tuplefy the (rval, idx) */
            val = scan_once_str(s, pystr, idx, &next_idx);
            if (val == NULL)
                goto bail;

            if (PyList_Append(rval, val) == -1)
                goto bail;

            /* Append took its own reference; drop ours */
            Py_CLEAR(val);
            idx = next_idx;

            /* skip whitespace between term and , */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

            /* bail if the array is closed or we didn't get the , delimiter */
            if (idx > end_idx) break;
            if (str[idx] == ']') {
                break;
            }
            else if (str[idx] != ',') {
                raise_errmsg("Expecting , delimiter", pystr, idx);
                goto bail;
            }
            idx++;

            /* skip whitespace after , */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
        }
    }

    /* verify that idx < end_idx, str[idx] should be ']' */
    if (idx > end_idx || str[idx] != ']') {
        /* NOTE(review): message says "Expecting object" for an unclosed
           array — looks copy-pasted from the object parser; confirm before
           changing since callers/tests may match on it. */
        raise_errmsg("Expecting object", pystr, end_idx);
        goto bail;
    }
    *next_idx_ptr = idx + 1;
    return rval;
bail:
    /* Error path: release any pending element and the result list */
    Py_XDECREF(val);
    Py_DECREF(rval);
    return NULL;
}
| + | ||
static PyObject *
_parse_array_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) {
    /* Read a JSON array from PyUnicode pystr.
    idx is the index of the first character after the opening bracket.
    *next_idx_ptr is a return-by-reference index to the first character after
        the closing bracket.

    Returns a new PyList
    */
    Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
    Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1;   /* index of last char */
    PyObject *val = NULL;
    PyObject *rval = PyList_New(0);
    Py_ssize_t next_idx;
    if (rval == NULL)
        return NULL;

    /* skip whitespace after [ */
    while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

    /* only loop if the array is non-empty */
    if (idx <= end_idx && str[idx] != ']') {
        while (idx <= end_idx) {

            /* read any JSON term */
            val = scan_once_unicode(s, pystr, idx, &next_idx);
            if (val == NULL)
                goto bail;

            if (PyList_Append(rval, val) == -1)
                goto bail;

            /* Append took its own reference; drop ours */
            Py_CLEAR(val);
            idx = next_idx;

            /* skip whitespace between term and , */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;

            /* bail if the array is closed or we didn't get the , delimiter */
            if (idx > end_idx) break;
            if (str[idx] == ']') {
                break;
            }
            else if (str[idx] != ',') {
                raise_errmsg("Expecting , delimiter", pystr, idx);
                goto bail;
            }
            idx++;

            /* skip whitespace after , */
            while (idx <= end_idx && IS_WHITESPACE(str[idx])) idx++;
        }
    }

    /* verify that idx < end_idx, str[idx] should be ']' */
    if (idx > end_idx || str[idx] != ']') {
        /* NOTE(review): message says "Expecting object" for an unclosed
           array — looks copy-pasted from the object parser; confirm before
           changing since callers/tests may match on it. */
        raise_errmsg("Expecting object", pystr, end_idx);
        goto bail;
    }
    *next_idx_ptr = idx + 1;
    return rval;
bail:
    /* Error path: release any pending element and the result list */
    Py_XDECREF(val);
    Py_DECREF(rval);
    return NULL;
}
| + | ||
| +static PyObject * | ||
| +_parse_constant(PyScannerObject *s, char *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr) { | ||
| + /* Read a JSON constant from PyString pystr. | ||
| + constant is the constant string that was found | ||
| + ("NaN", "Infinity", "-Infinity"). | ||
| + idx is the index of the first character of the constant | ||
| + *next_idx_ptr is a return-by-reference index to the first character after | ||
| + the constant. | ||
| + | ||
| + Returns the result of parse_constant | ||
| + */ | ||
| + PyObject *cstr; | ||
| + PyObject *rval; | ||
| + /* constant is "NaN", "Infinity", or "-Infinity" */ | ||
| + cstr = PyString_InternFromString(constant); | ||
| + if (cstr == NULL) | ||
| + return NULL; | ||
| + | ||
| + /* rval = parse_constant(constant) */ | ||
| + rval = PyObject_CallFunctionObjArgs(s->parse_constant, cstr, NULL); | ||
| + idx += PyString_GET_SIZE(cstr); | ||
| + Py_DECREF(cstr); | ||
| + *next_idx_ptr = idx; | ||
| + return rval; | ||
| +} | ||
| + | ||
static PyObject *
_match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr) {
    /* Read a JSON number from PyString pystr.
    start is the index of the first character of the number
    *next_idx_ptr is a return-by-reference index to the first character after
        the number.

    Returns a new PyObject representation of that number:
        PyInt, PyLong, or PyFloat.
    May return other types if parse_int or parse_float are set
    */
    char *str = PyString_AS_STRING(pystr);
    Py_ssize_t end_idx = PyString_GET_SIZE(pystr) - 1;   /* index of last char */
    Py_ssize_t idx = start;
    int is_float = 0;        /* set once a '.' fraction or exponent is seen */
    PyObject *rval;
    PyObject *numstr;

    /* read a sign if it's there, make sure it's not the end of the string */
    if (str[idx] == '-') {
        idx++;
        if (idx > end_idx) {
            /* StopIteration signals "not a number here" to scan_once_str */
            PyErr_SetNone(PyExc_StopIteration);
            return NULL;
        }
    }

    /* read as many integer digits as we find as long as it doesn't start with 0 */
    if (str[idx] >= '1' && str[idx] <= '9') {
        idx++;
        while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++;
    }
    /* if it starts with 0 we only expect one integer digit (JSON forbids leading zeros) */
    else if (str[idx] == '0') {
        idx++;
    }
    /* no integer digits, error */
    else {
        PyErr_SetNone(PyExc_StopIteration);
        return NULL;
    }

    /* if the next char is '.' followed by a digit then read all float digits */
    if (idx < end_idx && str[idx] == '.' && str[idx + 1] >= '0' && str[idx + 1] <= '9') {
        is_float = 1;
        idx += 2;
        while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++;
    }

    /* if the next char is 'e' or 'E' then maybe read the exponent (or backtrack) */
    if (idx < end_idx && (str[idx] == 'e' || str[idx] == 'E')) {

        /* save the index of the 'e' or 'E' just in case we need to backtrack */
        Py_ssize_t e_start = idx;
        idx++;

        /* read an exponent sign if present */
        if (idx < end_idx && (str[idx] == '-' || str[idx] == '+')) idx++;

        /* read all digits */
        while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++;

        /* if we got a digit, then parse as float. if not, backtrack
           (str[idx - 1] is safe: at minimum it is the 'e' itself) */
        if (str[idx - 1] >= '0' && str[idx - 1] <= '9') {
            is_float = 1;
        }
        else {
            idx = e_start;
        }
    }

    /* copy the section we determined to be a number */
    numstr = PyString_FromStringAndSize(&str[start], idx - start);
    if (numstr == NULL)
        return NULL;
    if (is_float) {
        /* parse as a float using a fast path if available, otherwise call user defined method */
        if (s->parse_float != (PyObject *)&PyFloat_Type) {
            rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL);
        }
        else {
            rval = PyFloat_FromDouble(PyOS_ascii_atof(PyString_AS_STRING(numstr)));
        }
    }
    else {
        /* parse as an int using a fast path if available, otherwise call user defined method */
        if (s->parse_int != (PyObject *)&PyInt_Type) {
            rval = PyObject_CallFunctionObjArgs(s->parse_int, numstr, NULL);
        }
        else {
            rval = PyInt_FromString(PyString_AS_STRING(numstr), NULL, 10);
        }
    }
    Py_DECREF(numstr);
    *next_idx_ptr = idx;
    return rval;
}
| + | ||
| +static PyObject * | ||
| +_match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssize_t *next_idx_ptr) { | ||
| + /* Read a JSON number from PyUnicode pystr. | ||
| + idx is the index of the first character of the number | ||
| + *next_idx_ptr is a return-by-reference index to the first character after | ||
| + the number. | ||
| + | ||
| + Returns a new PyObject representation of that number: | ||
| + PyInt, PyLong, or PyFloat. | ||
| + May return other types if parse_int or parse_float are set | ||
| + */ | ||
| + Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); | ||
| + Py_ssize_t end_idx = PyUnicode_GET_SIZE(pystr) - 1; | ||
| + Py_ssize_t idx = start; | ||
| + int is_float = 0; | ||
| + PyObject *rval; | ||
| + PyObject *numstr; | ||
| + | ||
| + /* read a sign if it's there, make sure it's not the end of the string */ | ||
| + if (str[idx] == '-') { | ||
| + idx++; | ||
| + if (idx > end_idx) { | ||
| + PyErr_SetNone(PyExc_StopIteration); | ||
| + return NULL; | ||
| + } | ||
| + } | ||
| + | ||
| + /* read as many integer digits as we find as long as it doesn't start with 0 */ | ||
| + if (str[idx] >= '1' && str[idx] <= '9') { | ||
| + idx++; | ||
| + while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; | ||
| + } | ||
| + /* if it starts with 0 we only expect one integer digit */ | ||
| + else if (str[idx] == '0') { | ||
| + idx++; | ||
| + } | ||
| + /* no integer digits, error */ | ||
| + else { | ||
| + PyErr_SetNone(PyExc_StopIteration); | ||
| + return NULL; | ||
| + } | ||
| + | ||
| + /* if the next char is '.' followed by a digit then read all float digits */ | ||
| + if (idx < end_idx && str[idx] == '.' && str[idx + 1] >= '0' && str[idx + 1] <= '9') { | ||
| + is_float = 1; | ||
| + idx += 2; | ||
| + while (idx < end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; | ||
| + } | ||
| + | ||
| + /* if the next char is 'e' or 'E' then maybe read the exponent (or backtrack) */ | ||
| + if (idx < end_idx && (str[idx] == 'e' || str[idx] == 'E')) { | ||
| + Py_ssize_t e_start = idx; | ||
| + idx++; | ||
| + | ||
| + /* read an exponent sign if present */ | ||
| + if (idx < end_idx && (str[idx] == '-' || str[idx] == '+')) idx++; | ||
| + | ||
| + /* read all digits */ | ||
| + while (idx <= end_idx && str[idx] >= '0' && str[idx] <= '9') idx++; | ||
| + | ||
| + /* if we got a digit, then parse as float. if not, backtrack */ | ||
| + if (str[idx - 1] >= '0' && str[idx - 1] <= '9') { | ||
| + is_float = 1; | ||
| + } | ||
| + else { | ||
| + idx = e_start; | ||
| + } | ||
| + } | ||
| + | ||
| + /* copy the section we determined to be a number */ | ||
| + numstr = PyUnicode_FromUnicode(&str[start], idx - start); | ||
| + if (numstr == NULL) | ||
| + return NULL; | ||
| + if (is_float) { | ||
| + /* parse as a float using a fast path if available, otherwise call user defined method */ | ||
| + if (s->parse_float != (PyObject *)&PyFloat_Type) { | ||
| + rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL); | ||
| + } | ||
| + else { | ||
| + rval = PyFloat_FromString(numstr, NULL); | ||
| + } | ||
| + } | ||
| + else { | ||
| + /* no fast path for unicode -> int, just call */ | ||
| + rval = PyObject_CallFunctionObjArgs(s->parse_int, numstr, NULL); | ||
| + } | ||
| + Py_DECREF(numstr); | ||
| + *next_idx_ptr = idx; | ||
| + return rval; | ||
| +} | ||
| + | ||
static PyObject *
scan_once_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
{
    /* Read one JSON term (of any kind) from PyString pystr.
    idx is the index of the first character of the term
    *next_idx_ptr is a return-by-reference index to the first character after
        the term.

    Returns a new PyObject representation of the term.
    Dispatches on the first character; anything not recognized below is
    assumed to be a number.
    */
    char *str = PyString_AS_STRING(pystr);
    Py_ssize_t length = PyString_GET_SIZE(pystr);
    if (idx >= length) {
        /* StopIteration signals "no term here" to the caller */
        PyErr_SetNone(PyExc_StopIteration);
        return NULL;
    }
    switch (str[idx]) {
        case '"':
            /* string */
            return scanstring_str(pystr, idx + 1,
                PyString_AS_STRING(s->encoding),
                PyObject_IsTrue(s->strict),
                next_idx_ptr);
        case '{':
            /* object */
            return _parse_object_str(s, pystr, idx + 1, next_idx_ptr);
        case '[':
            /* array */
            return _parse_array_str(s, pystr, idx + 1, next_idx_ptr);
        case 'n':
            /* null — length guard ensures all 4 chars are in bounds */
            if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') {
                Py_INCREF(Py_None);
                *next_idx_ptr = idx + 4;
                return Py_None;
            }
            break;
        case 't':
            /* true */
            if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') {
                Py_INCREF(Py_True);
                *next_idx_ptr = idx + 4;
                return Py_True;
            }
            break;
        case 'f':
            /* false */
            if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') {
                Py_INCREF(Py_False);
                *next_idx_ptr = idx + 5;
                return Py_False;
            }
            break;
        case 'N':
            /* NaN — non-standard JSON, routed through parse_constant */
            if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') {
                return _parse_constant(s, "NaN", idx, next_idx_ptr);
            }
            break;
        case 'I':
            /* Infinity — non-standard JSON, routed through parse_constant */
            if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') {
                return _parse_constant(s, "Infinity", idx, next_idx_ptr);
            }
            break;
        case '-':
            /* -Infinity — non-standard JSON; a plain '-' falls through to number parsing */
            if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') {
                return _parse_constant(s, "-Infinity", idx, next_idx_ptr);
            }
            break;
    }
    /* Didn't find a string, object, array, or named constant. Look for a number. */
    return _match_number_str(s, pystr, idx, next_idx_ptr);
}
| + | ||
static PyObject *
scan_once_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
{
    /* Read one JSON term (of any kind) from PyUnicode pystr.
    idx is the index of the first character of the term
    *next_idx_ptr is a return-by-reference index to the first character after
    the number.

    Returns a new PyObject representation of the term.
    */
    Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr);
    Py_ssize_t length = PyUnicode_GET_SIZE(pystr);
    if (idx >= length) {
        /* No more input: StopIteration tells the caller there is no term. */
        PyErr_SetNone(PyExc_StopIteration);
        return NULL;
    }
    /* Dispatch on the first character.  Each literal case re-checks the
       remaining length before touching str[idx + k], so falling through to
       the number matcher below also covers truncated literals. */
    switch (str[idx]) {
        case '"':
            /* string */
            return scanstring_unicode(pystr, idx + 1,
                PyObject_IsTrue(s->strict),
                next_idx_ptr);
        case '{':
            /* object */
            return _parse_object_unicode(s, pystr, idx + 1, next_idx_ptr);
        case '[':
            /* array */
            return _parse_array_unicode(s, pystr, idx + 1, next_idx_ptr);
        case 'n':
            /* null */
            if ((idx + 3 < length) && str[idx + 1] == 'u' && str[idx + 2] == 'l' && str[idx + 3] == 'l') {
                Py_INCREF(Py_None);
                *next_idx_ptr = idx + 4;
                return Py_None;
            }
            break;
        case 't':
            /* true */
            if ((idx + 3 < length) && str[idx + 1] == 'r' && str[idx + 2] == 'u' && str[idx + 3] == 'e') {
                Py_INCREF(Py_True);
                *next_idx_ptr = idx + 4;
                return Py_True;
            }
            break;
        case 'f':
            /* false */
            if ((idx + 4 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'l' && str[idx + 3] == 's' && str[idx + 4] == 'e') {
                Py_INCREF(Py_False);
                *next_idx_ptr = idx + 5;
                return Py_False;
            }
            break;
        /* The three non-standard constants below are routed through
           _parse_constant so the scanner's parse_constant hook (fetched in
           scanner_init) decides their Python value. */
        case 'N':
            /* NaN */
            if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') {
                return _parse_constant(s, "NaN", idx, next_idx_ptr);
            }
            break;
        case 'I':
            /* Infinity */
            if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') {
                return _parse_constant(s, "Infinity", idx, next_idx_ptr);
            }
            break;
        case '-':
            /* -Infinity */
            if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') {
                return _parse_constant(s, "-Infinity", idx, next_idx_ptr);
            }
            break;
    }
    /* Didn't find a string, object, array, or named constant. Look for a number. */
    return _match_number_unicode(s, pystr, idx, next_idx_ptr);
}
| + | ||
static PyObject *
scanner_call(PyObject *self, PyObject *args, PyObject *kwds)
{
    /* Python callable interface to scan_once_{str,unicode}.

       Called as scanner(string, idx); returns the (object, next_idx) pair
       built by _build_rval_index_tuple.  next_idx starts at -1 and is only
       updated by the scan on success; rval may be NULL on error —
       _build_rval_index_tuple (defined earlier in this file) is presumably
       responsible for propagating that. */
    PyObject *pystr;
    PyObject *rval;
    Py_ssize_t idx;
    Py_ssize_t next_idx = -1;
    static char *kwlist[] = {"string", "idx", NULL};
    PyScannerObject *s;
    assert(PyScanner_Check(self));
    s = (PyScannerObject *)self;
    /* O& with _convertPyInt_AsSsize_t converts the idx argument to Py_ssize_t. */
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO&:scan_once", kwlist, &pystr, _convertPyInt_AsSsize_t, &idx))
        return NULL;

    /* Dispatch on the concrete string type; anything else is a TypeError. */
    if (PyString_Check(pystr)) {
        rval = scan_once_str(s, pystr, idx, &next_idx);
    }
    else if (PyUnicode_Check(pystr)) {
        rval = scan_once_unicode(s, pystr, idx, &next_idx);
    }
    else {
        PyErr_Format(PyExc_TypeError,
            "first argument must be a string, not %.80s",
            Py_TYPE(pystr)->tp_name);
        return NULL;
    }
    return _build_rval_index_tuple(rval, next_idx);
}
| + | ||
static int
scanner_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    /* Initialize Scanner object from a decoder "context" object.

       The context (a JSONDecoder instance, from the Python side) supplies
       encoding, strict, object_hook, parse_float, parse_int and
       parse_constant as attributes; each PyObject_GetAttrString below
       returns a new (owned) reference, so the bail path can Py_CLEAR
       everything unconditionally. */
    PyObject *ctx;
    static char *kwlist[] = {"context", NULL};
    PyScannerObject *s;

    assert(PyScanner_Check(self));
    s = (PyScannerObject *)self;

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:make_scanner", kwlist, &ctx))
        return -1;

    /* Start from a clean slate so bail's Py_CLEARs are always safe. */
    s->encoding = NULL;
    s->strict = NULL;
    s->object_hook = NULL;
    s->parse_float = NULL;
    s->parse_int = NULL;
    s->parse_constant = NULL;

    /* PyString_AS_STRING is used on encoding */
    s->encoding = PyObject_GetAttrString(ctx, "encoding");
    if (s->encoding == Py_None) {
        /* encoding=None means "use the module default" (utf-8). */
        Py_DECREF(Py_None);
        s->encoding = PyString_InternFromString(DEFAULT_ENCODING);
    }
    else if (PyUnicode_Check(s->encoding)) {
        /* Normalize a unicode encoding name to a byte string. */
        PyObject *tmp = PyUnicode_AsEncodedString(s->encoding, NULL, NULL);
        Py_DECREF(s->encoding);
        s->encoding = tmp;
    }
    if (s->encoding == NULL || !PyString_Check(s->encoding))
        goto bail;

    /* All of these will fail "gracefully" so we don't need to verify them */
    s->strict = PyObject_GetAttrString(ctx, "strict");
    if (s->strict == NULL)
        goto bail;
    s->object_hook = PyObject_GetAttrString(ctx, "object_hook");
    if (s->object_hook == NULL)
        goto bail;
    s->parse_float = PyObject_GetAttrString(ctx, "parse_float");
    if (s->parse_float == NULL)
        goto bail;
    s->parse_int = PyObject_GetAttrString(ctx, "parse_int");
    if (s->parse_int == NULL)
        goto bail;
    s->parse_constant = PyObject_GetAttrString(ctx, "parse_constant");
    if (s->parse_constant == NULL)
        goto bail;

    return 0;

bail:
    Py_CLEAR(s->encoding);
    Py_CLEAR(s->strict);
    Py_CLEAR(s->object_hook);
    Py_CLEAR(s->parse_float);
    Py_CLEAR(s->parse_int);
    Py_CLEAR(s->parse_constant);
    return -1;
}
| + | ||
PyDoc_STRVAR(scanner_doc, "JSON scanner object");

/* Static type object for Scanner.  Several slots (tp_getattro, tp_setattro,
   tp_alloc, tp_new, tp_free) are left 0 here and patched in at runtime by
   init_speedups before PyType_Ready is called. */
static
PyTypeObject PyScannerType = {
    PyObject_HEAD_INIT(0)
    0,                    /* tp_internal */
    "Scanner",       /* tp_name */
    sizeof(PyScannerObject), /* tp_basicsize */
    0,                    /* tp_itemsize */
    scanner_dealloc, /* tp_dealloc */
    0,                    /* tp_print */
    0,                    /* tp_getattr */
    0,                    /* tp_setattr */
    0,                    /* tp_compare */
    0,                    /* tp_repr */
    0,                    /* tp_as_number */
    0,                    /* tp_as_sequence */
    0,                    /* tp_as_mapping */
    0,                    /* tp_hash */
    scanner_call,         /* tp_call */
    0,                    /* tp_str */
    0,/* PyObject_GenericGetAttr, */                    /* tp_getattro */
    0,/* PyObject_GenericSetAttr, */                    /* tp_setattro */
    0,                    /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,   /* tp_flags */
    scanner_doc,          /* tp_doc */
    0,                    /* tp_traverse */
    0,                    /* tp_clear */
    0,                    /* tp_richcompare */
    0,                    /* tp_weaklistoffset */
    0,                    /* tp_iter */
    0,                    /* tp_iternext */
    0,                    /* tp_methods */
    scanner_members,                    /* tp_members */
    0,                    /* tp_getset */
    0,                    /* tp_base */
    0,                    /* tp_dict */
    0,                    /* tp_descr_get */
    0,                    /* tp_descr_set */
    0,                    /* tp_dictoffset */
    scanner_init,                    /* tp_init */
    0,/* PyType_GenericAlloc, */        /* tp_alloc */
    0,/* PyType_GenericNew, */          /* tp_new */
    0,/* _PyObject_Del, */              /* tp_free */
};
| + | ||
| +static int | ||
| +encoder_init(PyObject *self, PyObject *args, PyObject *kwds) | ||
| +{ | ||
| + /* initialize Encoder object */ | ||
| + static char *kwlist[] = {"markers", "default", "encoder", "indent", "key_separator", "item_separator", "sort_keys", "skipkeys", "allow_nan", NULL}; | ||
| + | ||
| + PyEncoderObject *s; | ||
| + PyObject *allow_nan; | ||
| + | ||
| + assert(PyEncoder_Check(self)); | ||
| + s = (PyEncoderObject *)self; | ||
| + | ||
| + s->markers = NULL; | ||
| + s->defaultfn = NULL; | ||
| + s->encoder = NULL; | ||
| + s->indent = NULL; | ||
| + s->key_separator = NULL; | ||
| + s->item_separator = NULL; | ||
| + s->sort_keys = NULL; | ||
| + s->skipkeys = NULL; | ||
| + | ||
| + if (!PyArg_ParseTupleAndKeywords(args, kwds, "OOOOOOOOO:make_encoder", kwlist, | ||
| + &s->markers, &s->defaultfn, &s->encoder, &s->indent, &s->key_separator, &s->item_separator, &s->sort_keys, &s->skipkeys, &allow_nan)) | ||
| + return -1; | ||
| + | ||
| + Py_INCREF(s->markers); | ||
| + Py_INCREF(s->defaultfn); | ||
| + Py_INCREF(s->encoder); | ||
| + Py_INCREF(s->indent); | ||
| + Py_INCREF(s->key_separator); | ||
| + Py_INCREF(s->item_separator); | ||
| + Py_INCREF(s->sort_keys); | ||
| + Py_INCREF(s->skipkeys); | ||
| + s->fast_encode = (PyCFunction_Check(s->encoder) && PyCFunction_GetFunction(s->encoder) == (PyCFunction)py_encode_basestring_ascii); | ||
| + s->allow_nan = PyObject_IsTrue(allow_nan); | ||
| + return 0; | ||
| +} | ||
| + | ||
static PyObject *
encoder_call(PyObject *self, PyObject *args, PyObject *kwds)
{
    /* Python callable interface to encoder_listencode_obj.

       Called as encoder(obj, _current_indent_level); returns a fresh list
       of string chunks that the Python caller joins/yields. */
    static char *kwlist[] = {"obj", "_current_indent_level", NULL};
    PyObject *obj;
    PyObject *rval;
    Py_ssize_t indent_level;
    PyEncoderObject *s;
    assert(PyEncoder_Check(self));
    s = (PyEncoderObject *)self;
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "OO&:_iterencode", kwlist,
        &obj, _convertPyInt_AsSsize_t, &indent_level))
        return NULL;
    /* Accumulator list; encoder_listencode_obj appends chunks to it. */
    rval = PyList_New(0);
    if (rval == NULL)
        return NULL;
    if (encoder_listencode_obj(s, rval, obj, indent_level)) {
        /* Encoding failed: drop the partial output and propagate. */
        Py_DECREF(rval);
        return NULL;
    }
    return rval;
}
| + | ||
| +static PyObject * | ||
| +_encoded_const(PyObject *obj) | ||
| +{ | ||
| + /* Return the JSON string representation of None, True, False */ | ||
| + if (obj == Py_None) { | ||
| + static PyObject *s_null = NULL; | ||
| + if (s_null == NULL) { | ||
| + s_null = PyString_InternFromString("null"); | ||
| + } | ||
| + Py_INCREF(s_null); | ||
| + return s_null; | ||
| + } | ||
| + else if (obj == Py_True) { | ||
| + static PyObject *s_true = NULL; | ||
| + if (s_true == NULL) { | ||
| + s_true = PyString_InternFromString("true"); | ||
| + } | ||
| + Py_INCREF(s_true); | ||
| + return s_true; | ||
| + } | ||
| + else if (obj == Py_False) { | ||
| + static PyObject *s_false = NULL; | ||
| + if (s_false == NULL) { | ||
| + s_false = PyString_InternFromString("false"); | ||
| + } | ||
| + Py_INCREF(s_false); | ||
| + return s_false; | ||
| + } | ||
| + else { | ||
| + PyErr_SetString(PyExc_ValueError, "not a const"); | ||
| + return NULL; | ||
| + } | ||
| +} | ||
| + | ||
| +static PyObject * | ||
| +encoder_encode_float(PyEncoderObject *s, PyObject *obj) | ||
| +{ | ||
| + /* Return the JSON representation of a PyFloat */ | ||
| + double i = PyFloat_AS_DOUBLE(obj); | ||
| + if (!Py_IS_FINITE(i)) { | ||
| + if (!s->allow_nan) { | ||
| + PyErr_SetString(PyExc_ValueError, "Out of range float values are not JSON compliant"); | ||
| + return NULL; | ||
| + } | ||
| + if (i > 0) { | ||
| + return PyString_FromString("Infinity"); | ||
| + } | ||
| + else if (i < 0) { | ||
| + return PyString_FromString("-Infinity"); | ||
| + } | ||
| + else { | ||
| + return PyString_FromString("NaN"); | ||
| + } | ||
| + } | ||
| + /* Use a better float format here? */ | ||
| + return PyObject_Repr(obj); | ||
| +} | ||
| + | ||
| +static PyObject * | ||
| +encoder_encode_string(PyEncoderObject *s, PyObject *obj) | ||
| +{ | ||
| + /* Return the JSON representation of a string */ | ||
| + if (s->fast_encode) | ||
| + return py_encode_basestring_ascii(NULL, obj); | ||
| + else | ||
| + return PyObject_CallFunctionObjArgs(s->encoder, obj, NULL); | ||
| +} | ||
| + | ||
| +static int | ||
| +_steal_list_append(PyObject *lst, PyObject *stolen) | ||
| +{ | ||
| + /* Append stolen and then decrement its reference count */ | ||
| + int rval = PyList_Append(lst, stolen); | ||
| + Py_DECREF(stolen); | ||
| + return rval; | ||
| +} | ||
| + | ||
static int
encoder_listencode_obj(PyEncoderObject *s, PyObject *rval, PyObject *obj, Py_ssize_t indent_level)
{
    /* Encode Python object obj to a JSON term, rval is a PyList.

       Returns 0 on success, -1 with an exception set on failure.  Known
       types are appended directly; anything else goes through the user's
       default() function and is re-encoded recursively, with the markers
       dict (when enabled) guarding against circular references. */
    PyObject *newobj;
    int rv;

    if (obj == Py_None || obj == Py_True || obj == Py_False) {
        PyObject *cstr = _encoded_const(obj);
        if (cstr == NULL)
            return -1;
        return _steal_list_append(rval, cstr);
    }
    else if (PyString_Check(obj) || PyUnicode_Check(obj))
    {
        PyObject *encoded = encoder_encode_string(s, obj);
        if (encoded == NULL)
            return -1;
        return _steal_list_append(rval, encoded);
    }
    else if (PyInt_Check(obj) || PyLong_Check(obj)) {
        /* str() of an int/long is already valid JSON. */
        PyObject *encoded = PyObject_Str(obj);
        if (encoded == NULL)
            return -1;
        return _steal_list_append(rval, encoded);
    }
    else if (PyFloat_Check(obj)) {
        PyObject *encoded = encoder_encode_float(s, obj);
        if (encoded == NULL)
            return -1;
        return _steal_list_append(rval, encoded);
    }
    else if (PyList_Check(obj) || PyTuple_Check(obj)) {
        return encoder_listencode_list(s, rval, obj, indent_level);
    }
    else if (PyDict_Check(obj)) {
        return encoder_listencode_dict(s, rval, obj, indent_level);
    }
    else {
        /* Unknown type: register in markers (keyed by object address),
           ask default() for a serializable substitute, recurse. */
        PyObject *ident = NULL;
        if (s->markers != Py_None) {
            int has_key;
            ident = PyLong_FromVoidPtr(obj);
            if (ident == NULL)
                return -1;
            has_key = PyDict_Contains(s->markers, ident);
            if (has_key) {
                /* has_key == -1 means the lookup itself failed and an
                   exception is already set; only set ours otherwise. */
                if (has_key != -1)
                    PyErr_SetString(PyExc_ValueError, "Circular reference detected");
                Py_DECREF(ident);
                return -1;
            }
            if (PyDict_SetItem(s->markers, ident, obj)) {
                Py_DECREF(ident);
                return -1;
            }
        }
        newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL);
        if (newobj == NULL) {
            Py_XDECREF(ident);
            return -1;
        }
        rv = encoder_listencode_obj(s, rval, newobj, indent_level);
        Py_DECREF(newobj);
        if (rv) {
            Py_XDECREF(ident);
            return -1;
        }
        /* Success: unregister the marker before returning. */
        if (ident != NULL) {
            if (PyDict_DelItem(s->markers, ident)) {
                Py_XDECREF(ident);
                return -1;
            }
            Py_XDECREF(ident);
        }
        return rv;
    }
}
| + | ||
static int
encoder_listencode_dict(PyEncoderObject *s, PyObject *rval, PyObject *dct, Py_ssize_t indent_level)
{
    /* Encode Python dict dct a JSON term, rval is a PyList.

       Appends "{", key/value chunks joined by the configured separators,
       and "}".  Keys must be str/unicode, or a float/int/bool/None that is
       stringified first; other key types raise ValueError unless skipkeys
       is true.  Returns 0 on success, -1 with an exception set on error. */
    static PyObject *open_dict = NULL;
    static PyObject *close_dict = NULL;
    static PyObject *empty_dict = NULL;
    PyObject *kstr = NULL;
    PyObject *ident = NULL;
    PyObject *key, *value;
    Py_ssize_t pos;
    int skipkeys;
    Py_ssize_t idx;

    /* Lazily intern the three punctuation strings on first call. */
    if (open_dict == NULL || close_dict == NULL || empty_dict == NULL) {
        open_dict = PyString_InternFromString("{");
        close_dict = PyString_InternFromString("}");
        empty_dict = PyString_InternFromString("{}");
        if (open_dict == NULL || close_dict == NULL || empty_dict == NULL)
            return -1;
    }
    if (PyDict_Size(dct) == 0)
        return PyList_Append(rval, empty_dict);

    /* Circular-reference guard, keyed by the dict's address. */
    if (s->markers != Py_None) {
        int has_key;
        ident = PyLong_FromVoidPtr(dct);
        if (ident == NULL)
            goto bail;
        has_key = PyDict_Contains(s->markers, ident);
        if (has_key) {
            /* has_key == -1: lookup error, exception already set. */
            if (has_key != -1)
                PyErr_SetString(PyExc_ValueError, "Circular reference detected");
            goto bail;
        }
        if (PyDict_SetItem(s->markers, ident, dct)) {
            goto bail;
        }
    }

    if (PyList_Append(rval, open_dict))
        goto bail;

    if (s->indent != Py_None) {
        /* TODO: DOES NOT RUN */
        indent_level += 1;
        /*
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        */
    }

    /* TODO: C speedup not implemented for sort_keys */

    pos = 0;
    /* NOTE(review): PyObject_IsTrue can return -1 on error; that value is
       used as "true" here — presumably harmless since skipkeys is a plain
       bool on the Python side, but worth confirming. */
    skipkeys = PyObject_IsTrue(s->skipkeys);
    idx = 0;  /* counts emitted pairs, to place item separators */
    while (PyDict_Next(dct, &pos, &key, &value)) {
        PyObject *encoded;

        /* Normalize the key into kstr (an owned str/unicode). */
        if (PyString_Check(key) || PyUnicode_Check(key)) {
            Py_INCREF(key);
            kstr = key;
        }
        else if (PyFloat_Check(key)) {
            kstr = encoder_encode_float(s, key);
            if (kstr == NULL)
                goto bail;
        }
        else if (PyInt_Check(key) || PyLong_Check(key)) {
            kstr = PyObject_Str(key);
            if (kstr == NULL)
                goto bail;
        }
        else if (key == Py_True || key == Py_False || key == Py_None) {
            kstr = _encoded_const(key);
            if (kstr == NULL)
                goto bail;
        }
        else if (skipkeys) {
            continue;
        }
        else {
            /* TODO: include repr of key */
            PyErr_SetString(PyExc_ValueError, "keys must be a string");
            goto bail;
        }

        if (idx) {
            if (PyList_Append(rval, s->item_separator))
                goto bail;
        }

        /* Keys are always emitted as JSON strings, whatever their origin. */
        encoded = encoder_encode_string(s, kstr);
        Py_CLEAR(kstr);
        if (encoded == NULL)
            goto bail;
        if (PyList_Append(rval, encoded)) {
            Py_DECREF(encoded);
            goto bail;
        }
        Py_DECREF(encoded);
        if (PyList_Append(rval, s->key_separator))
            goto bail;
        if (encoder_listencode_obj(s, rval, value, indent_level))
            goto bail;
        idx += 1;
    }
    /* Success: remove the circular-reference marker. */
    if (ident != NULL) {
        if (PyDict_DelItem(s->markers, ident))
            goto bail;
        Py_CLEAR(ident);
    }
    if (s->indent != Py_None) {
        /* TODO: DOES NOT RUN */
        indent_level -= 1;
        /*
            yield '\n' + (' ' * (_indent * _current_indent_level))
        */
    }
    if (PyList_Append(rval, close_dict))
        goto bail;
    return 0;

bail:
    Py_XDECREF(kstr);
    Py_XDECREF(ident);
    return -1;
}
| + | ||
| + | ||
| +static int | ||
| +encoder_listencode_list(PyEncoderObject *s, PyObject *rval, PyObject *seq, Py_ssize_t indent_level) | ||
| +{ | ||
| + /* Encode Python list seq to a JSON term, rval is a PyList */ | ||
| + static PyObject *open_array = NULL; | ||
| + static PyObject *close_array = NULL; | ||
| + static PyObject *empty_array = NULL; | ||
| + PyObject *ident = NULL; | ||
| + PyObject *s_fast = NULL; | ||
| + Py_ssize_t num_items; | ||
| + PyObject **seq_items; | ||
| + Py_ssize_t i; | ||
| + | ||
| + if (open_array == NULL || close_array == NULL || empty_array == NULL) { | ||
| + open_array = PyString_InternFromString("["); | ||
| + close_array = PyString_InternFromString("]"); | ||
| + empty_array = PyString_InternFromString("[]"); | ||
| + if (open_array == NULL || close_array == NULL || empty_array == NULL) | ||
| + return -1; | ||
| + } | ||
| + ident = NULL; | ||
| + s_fast = PySequence_Fast(seq, "_iterencode_list needs a sequence"); | ||
| + if (s_fast == NULL) | ||
| + return -1; | ||
| + num_items = PySequence_Fast_GET_SIZE(s_fast); | ||
| + if (num_items == 0) { | ||
| + Py_DECREF(s_fast); | ||
| + return PyList_Append(rval, empty_array); | ||
| + } | ||
| + | ||
| + if (s->markers != Py_None) { | ||
| + int has_key; | ||
| + ident = PyLong_FromVoidPtr(seq); | ||
| + if (ident == NULL) | ||
| + goto bail; | ||
| + has_key = PyDict_Contains(s->markers, ident); | ||
| + if (has_key) { | ||
| + if (has_key != -1) | ||
| + PyErr_SetString(PyExc_ValueError, "Circular reference detected"); | ||
| + goto bail; | ||
| + } | ||
| + if (PyDict_SetItem(s->markers, ident, seq)) { | ||
| + goto bail; | ||
| + } | ||
| + } | ||
| + | ||
| + seq_items = PySequence_Fast_ITEMS(s_fast); | ||
| + if (PyList_Append(rval, open_array)) | ||
| + goto bail; | ||
| + if (s->indent != Py_None) { | ||
| + /* TODO: DOES NOT RUN */ | ||
| + indent_level += 1; | ||
| + /* | ||
| + newline_indent = '\n' + (' ' * (_indent * _current_indent_level)) | ||
| + separator = _item_separator + newline_indent | ||
| + buf += newline_indent | ||
| + */ | ||
| + } | ||
| + for (i = 0; i < num_items; i++) { | ||
| + PyObject *obj = seq_items[i]; | ||
| + if (i) { | ||
| + if (PyList_Append(rval, s->item_separator)) | ||
| + goto bail; | ||
| + } | ||
| + if (encoder_listencode_obj(s, rval, obj, indent_level)) | ||
| + goto bail; | ||
| + } | ||
| + if (ident != NULL) { | ||
| + if (PyDict_DelItem(s->markers, ident)) | ||
| + goto bail; | ||
| + Py_CLEAR(ident); | ||
| + } | ||
| + if (s->indent != Py_None) { | ||
| + /* TODO: DOES NOT RUN */ | ||
| + indent_level -= 1; | ||
| + /* | ||
| + yield '\n' + (' ' * (_indent * _current_indent_level)) | ||
| + */ | ||
| + } | ||
| + if (PyList_Append(rval, close_array)) | ||
| + goto bail; | ||
| + Py_DECREF(s_fast); | ||
| + return 0; | ||
| + | ||
| +bail: | ||
| + Py_XDECREF(ident); | ||
| + Py_DECREF(s_fast); | ||
| + return -1; | ||
| +} | ||
| + | ||
static void
encoder_dealloc(PyObject *self)
{
    /* Deallocate Encoder: drop every owned configuration reference set by
       encoder_init, then release the instance via the type's tp_free. */
    PyEncoderObject *s;
    assert(PyEncoder_Check(self));
    s = (PyEncoderObject *)self;
    Py_CLEAR(s->markers);
    Py_CLEAR(s->defaultfn);
    Py_CLEAR(s->encoder);
    Py_CLEAR(s->indent);
    Py_CLEAR(s->key_separator);
    Py_CLEAR(s->item_separator);
    Py_CLEAR(s->sort_keys);
    Py_CLEAR(s->skipkeys);
    self->ob_type->tp_free(self);
}
| + | ||
PyDoc_STRVAR(encoder_doc, "_iterencode(obj, _current_indent_level) -> iterable");

/* Static type object for Encoder.  As with PyScannerType, the zeroed
   tp_getattro/tp_setattro/tp_alloc/tp_new/tp_free slots are filled in by
   init_speedups before PyType_Ready runs. */
static
PyTypeObject PyEncoderType = {
    PyObject_HEAD_INIT(0)
    0,                    /* tp_internal */
    "Encoder",       /* tp_name */
    sizeof(PyEncoderObject), /* tp_basicsize */
    0,                    /* tp_itemsize */
    encoder_dealloc, /* tp_dealloc */
    0,                    /* tp_print */
    0,                    /* tp_getattr */
    0,                    /* tp_setattr */
    0,                    /* tp_compare */
    0,                    /* tp_repr */
    0,                    /* tp_as_number */
    0,                    /* tp_as_sequence */
    0,                    /* tp_as_mapping */
    0,                    /* tp_hash */
    encoder_call,         /* tp_call */
    0,                    /* tp_str */
    0,/* PyObject_GenericGetAttr, */                    /* tp_getattro */
    0,/* PyObject_GenericSetAttr, */                    /* tp_setattro */
    0,                    /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,   /* tp_flags */
    encoder_doc,          /* tp_doc */
    0,                    /* tp_traverse */
    0,                    /* tp_clear */
    0,                    /* tp_richcompare */
    0,                    /* tp_weaklistoffset */
    0,                    /* tp_iter */
    0,                    /* tp_iternext */
    0,                    /* tp_methods */
    encoder_members,                    /* tp_members */
    0,                    /* tp_getset */
    0,                    /* tp_base */
    0,                    /* tp_dict */
    0,                    /* tp_descr_get */
    0,                    /* tp_descr_set */
    0,                    /* tp_dictoffset */
    encoder_init,                    /* tp_init */
    0,/* PyType_GenericAlloc, */        /* tp_alloc */
    0,/* PyType_GenericNew, */          /* tp_new */
    0,/* _PyObject_Del, */              /* tp_free */
};
| + | ||
/* Module-level functions: the raw string codec helpers, exported so the
   pure-Python layer can use them even without Scanner/Encoder objects. */
static PyMethodDef speedups_methods[] = {
    {"encode_basestring_ascii",
        (PyCFunction)py_encode_basestring_ascii,
        METH_O,
        pydoc_encode_basestring_ascii},
    {"scanstring",
        (PyCFunction)py_scanstring,
        METH_VARARGS,
        pydoc_scanstring},
    {NULL, NULL, 0, NULL}
};

PyDoc_STRVAR(module_doc,
"simplejson speedups\n");
| + | ||
| +void | ||
| +init_speedups(void) | ||
| +{ | ||
| + PyObject *m; | ||
| + PyScannerType.tp_getattro = PyObject_GenericGetAttr; | ||
| + PyScannerType.tp_setattro = PyObject_GenericSetAttr; | ||
| + PyScannerType.tp_alloc = PyType_GenericAlloc; | ||
| + PyScannerType.tp_new = PyType_GenericNew; | ||
| + PyScannerType.tp_free = _PyObject_Del; | ||
| + if (PyType_Ready(&PyScannerType) < 0) | ||
| + return; | ||
| + PyEncoderType.tp_getattro = PyObject_GenericGetAttr; | ||
| + PyEncoderType.tp_setattro = PyObject_GenericSetAttr; | ||
| + PyEncoderType.tp_alloc = PyType_GenericAlloc; | ||
| + PyEncoderType.tp_new = PyType_GenericNew; | ||
| + PyEncoderType.tp_free = _PyObject_Del; | ||
| + if (PyType_Ready(&PyEncoderType) < 0) | ||
| + return; | ||
| + m = Py_InitModule3("_speedups", speedups_methods, module_doc); | ||
| + Py_INCREF((PyObject*)&PyScannerType); | ||
| + PyModule_AddObject(m, "make_scanner", (PyObject*)&PyScannerType); | ||
| + Py_INCREF((PyObject*)&PyEncoderType); | ||
| + PyModule_AddObject(m, "make_encoder", (PyObject*)&PyEncoderType); | ||
| +} |
348
simplejson/decoder.py
| @@ -0,0 +1,348 @@ | ||
| +"""Implementation of JSONDecoder | ||
| +""" | ||
| +import re | ||
| +import sys | ||
| +import struct | ||
| + | ||
| +from simplejson.scanner import make_scanner | ||
| +try: | ||
| + from simplejson._speedups import scanstring as c_scanstring | ||
| +except ImportError: | ||
| + c_scanstring = None | ||
| + | ||
| +__all__ = ['JSONDecoder'] | ||
| + | ||
| +FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL | ||
| + | ||
def _floatconstants():
    """Return the (NaN, +Infinity, -Infinity) float singletons.

    The hex string is the big-endian IEEE-754 encoding of NaN followed by
    +Infinity; struct's '>' specifier decodes it identically on any host,
    replacing the original explicit byte-swap on little-endian machines.
    """
    nan, inf = struct.unpack('>dd', '7FF80000000000007FF0000000000000'.decode('hex'))
    return nan, inf, -inf

NaN, PosInf, NegInf = _floatconstants()
| + | ||
| + | ||
def linecol(doc, pos):
    """Map character offset *pos* in *doc* to a 1-based line number and a
    column (0-based on line 1, measured from the preceding newline after).
    """
    newlines_before = doc.count('\n', 0, pos)
    if not newlines_before:
        return 1, pos
    return newlines_before + 1, pos - doc.rindex('\n', 0, pos)
| + | ||
| + | ||
def errmsg(msg, doc, pos, end=None):
    """Format a parse-error message locating *pos* (and optionally *end*)
    within *doc* as line/column coordinates.

    Note that this function is called from _speedups, so its name and
    signature must stay stable.
    """
    start_line, start_col = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, start_line, start_col, pos)
    end_line, end_col = linecol(doc, end)
    return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
        msg, start_line, start_col, end_line, end_col, pos, end)
| + | ||
| + | ||
# Values for the non-standard (but commonly produced) JSON constant
# literals, built once from the IEEE-754 bit patterns above.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}

# One "chunk" of a JSON string: a (possibly empty) run of plain characters
# followed by the closing quote, a backslash, or a literal control char.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character backslash escapes; \uXXXX is handled separately.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

# Encoding assumed for byte strings when the caller passes encoding=None.
DEFAULT_ENCODING = "utf-8"
| + | ||
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1  # index of the opening quote, for error messages
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                # Bug fix: format the message through errmsg() so the
                # exception carries line/column diagnostics; previously the
                # raw (msg, s, end) tuple became the exception args.
                raise ValueError(errmsg(msg, s, end))
            else:
                _append(terminator)
            continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                raise ValueError(
                    errmsg("Invalid \\escape: %r" % (esc,), s, end))
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise ValueError(errmsg(msg, s, end))
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise ValueError(errmsg(msg, s, end))
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise ValueError(errmsg(msg, s, end))
                uni2 = int(esc2, 16)
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
| + | ||
| + | ||
# Use speedup if available: prefer the C scanstring, fall back to pure Python.
scanstring = c_scanstring or py_scanstring

# JSON whitespace: space, horizontal tab, newline, carriage return.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
| + | ||
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON object from *s*, where *end* indexes the character just
    after the opening '{'.  Returns (dict-or-hook-result, index after '}').
    *scan_once* is the recursive term scanner; *object_hook*, if not None,
    post-processes the finished dict.
    """
    pairs = {}
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            return pairs, end + 1
        elif nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)

        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise ValueError(errmsg("Expecting : delimiter", s, end))

        end += 1

        # Skip whitespace after ':' — fast paths for zero/one blank, then
        # the regex for longer runs; IndexError just means end-of-input.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value

        # Find the pair terminator: '}' ends the object, ',' continues it.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1

        if nextchar == '}':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))

        # Skip whitespace before the next quoted property name.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''

        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))

    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
| + | ||
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON array from string ``s`` starting just after the opening
    # '[' at index ``end``; return ``(list, index_after_closing_bracket)``.
    # Python 2 tuple-parameter syntax, as with JSONObject above.
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    # Bind the append method once to avoid an attribute lookup per element.
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            # The scanner signals "no value here" with StopIteration.
            raise ValueError(errmsg("Expecting object", s, end))
        _append(value)
        # Skip whitespace, then expect ',' (more elements) or ']' (done).
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))

        # Fast-path whitespace skip after the comma (mirrors JSONObject).
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

    return values, end
| + | ||
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.

    """

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True):
        """``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default). It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``. This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded. By default this is equivalent to
        float(num_str). This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded. By default this is equivalent to
        int(num_str). This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.

        """
        self.encoding = encoding
        self.object_hook = object_hook
        # Fall back to the builtin conversions when no parser is supplied.
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        # _CONSTANTS is defined earlier in this module; presumably it maps
        # 'NaN'/'Infinity'/'-Infinity' to float values -- confirm there.
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # NOTE: these attribute names form the contract that make_scanner()
        # reads from this object; renaming any of them breaks the scanner.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        self.scan_once = make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)

        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        # Reject any non-whitespace data after the document.
        end = _w(s, end).end()
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.

        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            # The scanner raises StopIteration when no JSON value starts
            # at ``idx``; surface that as a ValueError to callers.
            raise ValueError("No JSON object could be decoded")
        return obj, end
436
simplejson/encoder.py
| @@ -0,0 +1,436 @@ | ||
| +"""Implementation of JSONEncoder | ||
| +""" | ||
| +import re | ||
| + | ||
# Optional C speedups: fall back to pure Python when the compiled
# extension module is unavailable.
try:
    from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
    c_encode_basestring_ascii = None
try:
    from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
    c_make_encoder = None

# Characters that must be escaped inside a JSON string literal.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# Backslash, double quote, or anything outside printable ASCII
# (used when ensure_ascii output is requested).
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Any byte with the high bit set: the string needs UTF-8 decoding first.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
# Every remaining C0 control character gets the generic \uXXXX escape.
for _i in range(0x20):
    ESCAPE_DCT.setdefault(chr(_i), '\\u%04x' % (_i,))

# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr

def encode_basestring(s):
    """Return the JSON string literal (quoted and escaped) for *s*."""
    return '"' + ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s) + '"'


def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON string literal for *s*.

    Non-ASCII characters are emitted as \\uXXXX escapes; code points above
    the BMP become UTF-16 surrogate pairs.
    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        # A byte string with high bits set must be decoded before escaping.
        s = s.decode('utf-8')
    def replace(match):
        c = match.group(0)
        mapped = ESCAPE_DCT.get(c)
        if mapped is not None:
            return mapped
        n = ord(c)
        if n < 0x10000:
            return '\\u%04x' % (n,)
        # Split an astral code point into a UTF-16 surrogate pair.
        n -= 0x10000
        s1 = 0xd800 | ((n >> 10) & 0x3ff)
        s2 = 0xdc00 | (n & 0x3ff)
        return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'


# Prefer the C implementation when it imported successfully.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
| + | ||
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Encodes the following types by default: ``dict`` -> object,
    ``list``/``tuple`` -> array, ``str``/``unicode`` -> string,
    ``int``/``long``/``float`` -> number, ``True`` -> true,
    ``False`` -> false, ``None`` -> null.

    To serialize other objects, subclass and override ``default()`` so it
    returns a serializable value for them, delegating to the superclass
    implementation otherwise (which raises ``TypeError``).

    """
    # Defaults; overridable per-instance via the ``separators`` argument.
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        ``skipkeys``: when False (default), attempting to encode a dict key
        that is not a str, int, long, float or None raises TypeError; when
        True such entries are silently skipped.

        ``ensure_ascii``: when True (default), the output is a ``str`` with
        every non-ASCII character escaped; when False it is ``unicode``.

        ``check_circular``: when True, containers are checked for circular
        references during encoding to prevent infinite recursion (which
        would otherwise cause an OverflowError).

        ``allow_nan``: when True, NaN/Infinity/-Infinity are encoded as
        such (beyond the JSON spec, but matching most JavaScript encoders);
        when False, encoding such floats raises ValueError.

        ``sort_keys``: when True, dictionary output is sorted by key --
        useful for producing comparable, deterministic serializations.

        ``indent``: a non-negative integer pretty-print indent per nesting
        level (0 inserts newlines only); None (default) is most compact.

        ``separators``: optional ``(item_separator, key_separator)`` tuple;
        the default is ``(', ', ': ')``.  Use ``(',', ':')`` to eliminate
        whitespace entirely.

        ``default``: optional function invoked for objects that cannot
        otherwise be serialized; it must return an encodable value or
        raise TypeError.

        ``encoding``: if not None, all incoming ``str`` inputs are decoded
        with this encoding before JSON-encoding.  Defaults to UTF-8.

        """

        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            # Shadow the ``default`` method with the supplied callable.
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Subclass hook: return a serializable object for ``o`` or call
        the base implementation (which raises ``TypeError``).

        For example, to support arbitrary iterators::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)

        """
        raise TypeError("%r is not JSON serializable" % (o,))

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        # Fast path: a bare string needs none of the recursive machinery.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if _encoding is not None and _encoding != 'utf-8':
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            return encode_basestring(o)
        # Materialize the chunk iterator into a list before joining so
        # that mid-stream errors are reported with better detail than
        # ''.join(iterator) would give.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode ``o`` lazily, yielding each string chunk as it becomes
        available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        if self.check_circular:
            markers = {}
        else:
            markers = None

        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the chosen encoder so byte strings are first decoded
            # from the configured encoding.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Detect specials without relying on platform float internals:
            # compare against computed infinities, and use o != o for NaN.
            if o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            elif o != o:
                text = 'NaN'
            else:
                return _repr(o)

            if not allow_nan:
                raise ValueError("Out of range float values are not JSON compliant: %r"
                    % (o,))

            return text

        # The C encoder handles only the compact, unsorted one-shot case;
        # everything else goes through the pure-Python generator factory.
        if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
| + | ||
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
    ## HACK: hand-optimized bytecode; turn globals into locals
    False=False,
    True=True,
    ValueError=ValueError,
    basestring=basestring,
    dict=dict,
    float=float,
    id=id,
    int=int,
    isinstance=isinstance,
    list=list,
    long=long,
    str=str,
    tuple=tuple,
    ):
    # Build and return the pure-Python ``_iterencode(o, indent_level)``
    # generator, used whenever the C encoder is unavailable or the options
    # (indent, sort_keys, non-one-shot) rule it out.

    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON encoding of a list/tuple, chunk by chunk.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Circular-reference detection keyed on object identity.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        # ``buf`` is the text emitted immediately before each element:
        # '[' (plus indent) for the first, the separator for the rest.
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                buf = separator
            # Scalars are emitted fused with the preceding separator to
            # keep chunk counts down; containers recurse.
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            # Dedent before the closing bracket when pretty-printing.
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON encoding of a dict, chunk by chunk.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            # Python 2 in-place sort of the materialized item list.
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            # Coerce non-string keys to their JSON string forms.
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them. Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif isinstance(key, (int, long)):
                key = str(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif _skipkeys:
                continue
            else:
                raise TypeError("key %r is not a string" % (key,))
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Top-level dispatch: scalars are yielded directly, containers
        # delegate to the specialized generators, and anything else is
        # converted via ``_default`` and re-dispatched.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            if markers is not None:
                # Guard against ``default`` returning (a structure
                # containing) the same object, directly or indirectly.
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]

    return _iterencode
65
simplejson/scanner.py
| @@ -0,0 +1,65 @@ | ||
| +"""JSON token scanner | ||
| +""" | ||
| +import re | ||
| +try: | ||
| + from simplejson._speedups import make_scanner as c_make_scanner | ||
| +except ImportError: | ||
| + c_make_scanner = None | ||
| + | ||
| +__all__ = ['make_scanner'] | ||
| + | ||
| +NUMBER_RE = re.compile( | ||
| + r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?', | ||
| + (re.VERBOSE | re.MULTILINE | re.DOTALL)) | ||
| + | ||
| +def py_make_scanner(context): | ||
| + parse_object = context.parse_object | ||
| + parse_array = context.parse_array | ||
| + parse_string = context.parse_string | ||
| + match_number = NUMBER_RE.match | ||
| + encoding = context.encoding | ||
| + strict = context.strict | ||
| + parse_float = context.parse_float | ||
| + parse_int = context.parse_int | ||
| + parse_constant = context.parse_constant | ||
| + object_hook = context.object_hook | ||
| + | ||
| + def _scan_once(string, idx): | ||
| + try: | ||
| + nextchar = string[idx] | ||
| + except IndexError: | ||
| + raise StopIteration | ||
| + | ||
| + if nextchar == '"': | ||
| + return parse_string(string, idx + 1, encoding, strict) | ||
| + elif nextchar == '{': | ||
| + return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook) | ||
| + elif nextchar == '[': | ||
| + return parse_array((string, idx + 1), _scan_once) | ||
| + elif nextchar == 'n' and string[idx:idx + 4] == 'null': | ||
| + return None, idx + 4 | ||
| + elif nextchar == 't' and string[idx:idx + 4] == 'true': | ||
| + return True, idx + 4 | ||
| + elif nextchar == 'f' and string[idx:idx + 5] == 'false': | ||
| + return False, idx + 5 | ||
| + | ||
| + m = match_number(string, idx) | ||
| + if m is not None: | ||
| + integer, frac, exp = m.groups() | ||
| + if frac or exp: | ||
| + res = parse_float(integer + (frac or '') + (exp or '')) | ||
| + else: | ||
| + res = parse_int(integer) | ||
| + return res, m.end() | ||
| + elif nextchar == 'N' and string[idx:idx + 3] == 'NaN': | ||
| + return parse_constant('NaN'), idx + 3 | ||
| + elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity': | ||
| + return parse_constant('Infinity'), idx + 8 | ||
| + elif nextchar == '-' and string[idx:idx + 9] == '-Infinity': | ||
| + return parse_constant('-Infinity'), idx + 9 | ||
| + else: | ||
| + raise StopIteration | ||
| + | ||
| + return _scan_once | ||
| + | ||
| +make_scanner = c_make_scanner or py_make_scanner |
35
simplejson/tool.py
| @@ -0,0 +1,35 @@ | ||
| +r"""Using simplejson from the shell to validate and | ||
| +pretty-print:: | ||
| + | ||
| + $ echo '{"json":"obj"}' | python -msimplejson.tool | ||
| + { | ||
| + "json": "obj" | ||
| + } | ||
| + $ echo '{ 1.2:3.4}' | python -msimplejson.tool | ||
| + Expecting property name: line 1 column 2 (char 2) | ||
| +""" | ||
| +import simplejson | ||
| + | ||
| +def main(): | ||
| + import sys | ||
| + if len(sys.argv) == 1: | ||
| + infile = sys.stdin | ||
| + outfile = sys.stdout | ||
| + elif len(sys.argv) == 2: | ||
| + infile = open(sys.argv[1], 'rb') | ||
| + outfile = sys.stdout | ||
| + elif len(sys.argv) == 3: | ||
| + infile = open(sys.argv[1], 'rb') | ||
| + outfile = open(sys.argv[2], 'wb') | ||
| + else: | ||
| + raise SystemExit("%s [infile [outfile]]" % (sys.argv[0],)) | ||
| + try: | ||
| + obj = simplejson.load(infile) | ||
| + except ValueError, e: | ||
| + raise SystemExit(e) | ||
| + simplejson.dump(obj, outfile, sort_keys=True, indent=4) | ||
| + outfile.write('\n') | ||
| + | ||
| + | ||
| +if __name__ == '__main__': | ||
| + main() |
1
testdata/direct_message-destroy.json
| @@ -0,0 +1 @@ | ||
| +{"created_at":"Tue Jun 05 06:00:51 +0000 2007","recipient_id":718443,"sender_id":673483,"sender_screen_name":"dewitt","text":"\u041c\u043e\u0451 \u0441\u0443\u0434\u043d\u043e \u043d\u0430 \u0432\u043e\u0437\u0434\u0443\u0448\u043d\u043e\u0439 \u043f\u043e\u0434\u0443\u0448\u043a\u0435 \u043f\u043e\u043b\u043d\u043e \u0443\u0433\u0440\u0435\u0439","recipient_screen_name":"kesuke","id":3496342} |
1
testdata/direct_messages-new.json
| @@ -0,0 +1 @@ | ||
| +{"sender_screen_name":"dewitt","created_at":"Tue Jun 05 05:53:22 +0000 2007","recipient_screen_name":"kesuke","recipient_id":718443,"text":"\u041c\u043e\u0451 \u0441\u0443\u0434\u043d\u043e \u043d\u0430 \u0432\u043e\u0437\u0434\u0443\u0448\u043d\u043e\u0439 \u043f\u043e\u0434\u0443\u0448\u043a\u0435 \u043f\u043e\u043b\u043d\u043e \u0443\u0433\u0440\u0435\u0439","id":3496202,"sender_id":673483} |
1
testdata/direct_messages.json
| @@ -0,0 +1 @@ | ||
| +[{"created_at":"Mon Jun 04 00:07:58 +0000 2007","recipient_id":718443,"sender_id":673483,"text":"A l\u00e9gp\u00e1rn\u00e1s haj\u00f3m tele van angoln\u00e1kkal.","sender_screen_name":"dewitt","id":3444662,"recipient_screen_name":"kesuke"}] |
1
testdata/featured.json
| @@ -0,0 +1 @@ | ||
| +[{"name":"Steven Wright","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/5819362/normal/sw.jpg?1178499811","screen_name":"stevenwright","description":"Every day, one quote from me.","location":"","url":null,"id":5819362,"protected":false,"status":{"created_at":"Fri Jun 01 16:06:28 +0000 2007","text":"I'm addicted to placebos. I could quit but it wouldn't matter.","id":86991742}},{"name":"Justine","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/7846/normal/iJustine_100x100.jpg?1174864273","screen_name":"ijustine","description":"I am the internet.","location":"Pittsburgh, PA","url":"http://www.tastyblogsnack.com","id":7846,"protected":false,"status":{"created_at":"Sun Jun 03 19:57:09 +0000 2007","text":"Please help me wake up Starbucks.","id":89591842}},{"name":"timer","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/5997662/normal/timer.gif?1179331208","screen_name":"timer","description":"","location":"","url":"http://retweet.com/timer","id":5997662,"protected":false,"status":{"created_at":"Wed May 16 16:13:44 +0000 2007","text":"Need to remember something? Send me a direct message, and I'll tweet you back. For example, 'd timer 45 call mom' reminds you in 45 minutes.","id":66100582}},{"name":"R1 Big Weekend","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/5962712/normal/r1logo_twitter.jpg?1179139696","screen_name":"r1bigweekend","description":null,"location":null,"url":null,"id":5962712,"protected":false,"status":{"created_at":"Sun May 20 21:55:23 +0000 2007","text":"Thanks Preston. The site is quickly getting packed up now. 
It's dark and cold outside.","id":71673422}},{"name":"Scott Hanselman","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/5676102/normal/scott48x48.jpg?1177998926","screen_name":"shanselman","description":null,"location":null,"url":null,"id":5676102,"protected":false,"status":{"created_at":"Wed May 30 17:35:25 +0000 2007","text":"Blood sugar is 110. Great way to start the day. Ready for breakfast.","id":84275702}},{"name":"Jodrell Bank","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/5747502/normal/twitter_jodrellbank.png?1178216103","screen_name":"jodrellbank","description":"Home of the world's third-largest steerable radio telescope, and the MERLIN National Facility. Part of the Univ. of Manchester","location":"53.236057, -2.306871","url":"http://www.manchester.ac.uk/jodrellbank/","id":5747502,"protected":false,"status":{"created_at":"Sat Jun 02 13:30:00 +0000 2007","text":"Getting ready to bounce poems off the Moon and pick up their echoes with the Telescope as part of the First Move Festival on June 15-17","id":88102552}},{"name":"TwitLit","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/5588242/normal/twittericon.png?1178498116","screen_name":"TwitterLit","description":"Twittering the first lines of books so you don't have to. SEE ALSO: TwitterLitUK - TwitterLitCA - TwitterLitNews. [To comment, d. msg. 
me or contact via site.]","location":"The Stacks","url":"http://twitterlit.com","id":5588242,"protected":false,"status":{"created_at":"Sun Jun 03 09:00:46 +0000 2007","text":"\"On the drive from the suburbs to the city, we'd experienced a disturbing number of memory lapses\" http://tweetl.com/t0","id":89035512}},{"name":"BlogPhiladelphia","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/5724872/normal/blogphiladelphia_logo.gif?1178142536","screen_name":"BlogPhilly","description":"A Social Media UnConference in Philly","location":"Philly!","url":"http://blogphiladelphia.net","id":5724872,"protected":false,"status":{"created_at":"Sun Jun 03 20:32:29 +0000 2007","text":"wooohoooo home in nolibs the land of indie rock tight jeans and snark aplenty. good to be back","id":89621942}},{"name":"Lisa@FashWEEK","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/5291252/normal/TWITTER.jpg?1177044270","screen_name":"OzFashionWeek","description":"Fashion, Beauty + Style editor for News.com.au","location":"Sydney","url":"http://www.news.com.au/entertainment/feature/0,,5012703,00.html","id":5291252,"protected":false,"status":{"created_at":"Tue May 29 06:24:23 +0000 2007","text":"spent today getting distracted by net-a-porter.com ... that's a dangerous site for a fashion addict!","id":82160232}},{"name":"palm pictures","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/4260991/normal/cometohillary.gif?1176335676","screen_name":"palmpictures","description":"Independent Music, Film, and Love!","location":"New York, NY","url":"http://www.palmpictures.com","id":4260991,"protected":false,"status":{"created_at":"Sun Jun 03 01:16:22 +0000 2007","text":"loves our Book Expo booth neighbors from Endless Games. 
check em out http://endlessgames.com/gamesmen.html","id":88685562}},{"name":"Barack Obama","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/813286/normal/iconbg.jpg?1177633039","screen_name":"BarackObama","description":"","location":"Chicago, IL","url":"http://www.barackobama.com","id":813286,"protected":false,"status":{"created_at":"Sun Jun 03 19:49:52 +0000 2007","text":"Heading to Democratic presidential debate at St. Anselm College in New Hampshire. Debate starts at 7pm EST and will air live on CNN.","id":89585682}},{"name":"Hollywood.com Live","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/4455001/normal/H_48x48c.gif?1177618725","screen_name":"hollywoodcom","description":"Hollywood.com Live @ The Tribeca Film Festival","location":"New York, NY","url":"http://fansites.hollywood.com/live.html","id":4455001,"protected":false,"status":{"created_at":"Thu May 24 02:29:35 +0000 2007","text":"Blake, where are you now?! We'll miss you nxt wk... Go IDOLS! ... See you both over the rainbow!","id":76045122}},{"name":"Veronica","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/10350/normal/mypictr_140x144.jpg?1179253942","screen_name":"Veronica","description":"CNET TV host and podcasting diva of Buzz Out Loud","location":"San Francisco","url":"http://www.veronicabelmont.com","id":10350,"protected":false,"status":{"created_at":"Sun Jun 03 03:11:10 +0000 2007","text":"i just saw kari byron! my hero!","id":88774212}},{"name":"Alison","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/5248441/normal/new1.jpg?1177002287","screen_name":"AFineFrenzy","description":"","location":"Los Angeles","url":"http://www.myspace.com/afinefrenzy","id":5248441,"protected":false,"status":{"created_at":"Sat Jun 02 06:52:41 +0000 2007","text":"the sea took my sunglasses, i took a boatload of sand home in my bikini. fair trade? the moon is orange. 
my skin is still warm from the sun.","id":87808312}},{"name":"Rocketboom","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/1502111/normal/rocket.jpg?1174312502","screen_name":"Rocketboom","description":"Daily","location":"New York City","url":"http://www.rocketboom.com","id":1502111,"protected":false,"status":{"created_at":"Sat Jun 02 15:33:06 +0000 2007","text":"Ultimate Frisbee 2.0 in Central Park today on Sat. @ 4pm: http://groups.google.com/group/frisbee2point0/","id":88218242}},{"name":"TrippingOnWords","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/59003/normal/lara_and_claire_BIG_HAIR.jpg?1171962588","screen_name":"TrippingOnWords","description":"Training with 200 Kenyan Orphans for the Hope Runs Marathon and 10K. As a new non profit, Hope Runs is looking for help\u2026check us out at TrippingOnWords.com!","location":"Kenya","url":"http://TrippingOnWords.com","id":59003,"protected":false,"status":{"created_at":"Sat Jun 02 18:55:59 +0000 2007","text":"heading to bed early...visiting Manager's church tomorrow. an early start is essential, apparently","id":88405002}},{"name":"Web 2.0 Expo","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/4200861/normal/webex2007_logo_square.jpg?1176317215","screen_name":"w2e","description":"Official twitter for the Web 2.0 Expo","location":"san francisco, ca","url":"http://blog.web2expo.com","id":4200861,"protected":false,"status":{"created_at":"Thu Apr 19 01:00:17 +0000 2007","text":"Thanks for coming! 
\r\nWine being brought to web2open in secs.","id":32588451}},{"name":"Jessica Mellott","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/3658381/normal/Jessica_Mellott.jpg?1177685550","screen_name":"JessicaMellott","description":"Teen pop singer www.jessicamellott.com www.myspace.com/jessicamellott","location":"Maryland","url":"http://www.jessicamellott.com","id":3658381,"protected":false,"status":{"created_at":"Fri Jun 01 23:36:46 +0000 2007","text":"Getting ready for grad party weekend!","id":87429882}},{"name":"Drive","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/3946281/normal/nathan.png?1176155112","screen_name":"foxdrive","description":"Action-fueled drama about an illegal, underground cross-country road race. Director Greg Yaitanes will Twitter live director's commentary starting Sunday 8/7c.","location":"","url":null,"id":3946281,"protected":false,"status":{"created_at":"Tue Apr 17 04:05:53 +0000 2007","text":"twitter and tell me what you thought of the episode tonight.","id":30603561}},{"name":"Status Updates","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/46413/normal/maintenance.gif?1171961490","screen_name":"twitter_status","description":"140 characters or less on the health of Twitter!","location":"Inside the Twitter","url":null,"id":46413,"protected":false,"status":{"created_at":"Tue May 29 07:54:18 +0000 2007","text":"Catching up on processing updates after some brief confusion.","id":82226192}}] |
1
testdata/followers.json
| @@ -0,0 +1 @@ | ||
| +[{"name":"Robert Brook","description":null,"location":"London","url":"http://www.druidstreet.com/","id":5567,"protected":false,"status":{"created_at":"Sun Jun 03 19:56:54 +0000 2007","text":"poking greader trends - some must live, some must die","id":89591602},"profile_image_url":"http://assets1.twitter.com/images/default_image.gif?1180755379","screen_name":"robertbrook"},{"name":"cote","description":"Industry analyst with RedMonk. DrunkAndRetired.com. YUH!","location":"Austin, Texas","url":"http://www.peopleoverprocess.com/","id":53953,"protected":false,"status":{"created_at":"Sun Jun 03 19:54:55 +0000 2007","text":"Flight to ORD delayed 2 hours. This can't be good.","id":89589762},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/53953/normal/square-lil-hat.jpg?1171962174","screen_name":"cote"},{"name":"Moby","description":"I'm here. Now what?","location":"San Francisco","url":"http://blog.mobius.name","id":4296211,"protected":false,"status":{"created_at":"Sun Jun 03 19:46:00 +0000 2007","text":"Responding to hate mail. \"Ignunce\" at its finest","id":89583032},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/4296211/normal/mobycycle.031007.jpg?1176350283","screen_name":"ibod8x5"},{"name":"Alex King","description":"","location":"Denver, CO","url":"http://alexking.org","id":101143,"protected":false,"status":{"created_at":"Sun Jun 03 19:06:54 +0000 2007","text":"@andrewhyde Coverage was poor enough in the area to drive me back to Sprint.","id":89554432},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/101143/normal/alex_king.jpg?1171953145","screen_name":"alexkingorg"},{"name":"Patrick Mueller","description":"Graying IBMer working for the WebSphere organization","location":"The Triangle, NC","url":"http://muellerware.org","id":765080,"protected":false,"status":{"created_at":"Sun Jun 03 18:48:51 +0000 2007","text":"@JoshStaiger that was one I was looking at, which Google (coincidentally?) 
ranked highly. Wondering how far I can get w/avacado & salsa","id":89537732},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/765080/normal/glasses-down-115x115-gradient.jpg?1177726044","screen_name":"pmuellr"},{"name":"Kesuke Miyagi","description":"\u79c1\u306e\u30db\u30d0\u30fc\u30af\u30e9\u30d5\u30c8 \u306f\u9c3b\u304c\u4e00\u676f\u3067\u3059\u3002","location":"Okinawa, Japan","url":null,"id":718443,"protected":false,"status":{"created_at":"Sun Jun 03 18:15:29 +0000 2007","text":"\u041c\u043e\u0451 \u0441\u0443\u0434\u043d\u043e \u043d\u0430 \u0432\u043e\u0437\u0434\u0443\u0448\u043d\u043e\u0439 \u043f\u043e\u0434\u0443\u0448\u043a\u0435 \u043f\u043e\u043b\u043d\u043e \u0443\u0433\u0440\u0435\u0439","id":89512102},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/718443/normal/kesuke.png?1169966399","screen_name":"kesuke"},{"name":"Jeff Barr","description":"Amazon Web Services Evangelist, Blogger, Father of 5.","location":"Sammamish, Washington, USA","url":"http://www.jeff-barr.com","id":48443,"protected":false,"status":{"created_at":"Sun Jun 03 16:43:47 +0000 2007","text":"Preparing for trip to DC tomorrow AM - lots of reading materials, TODO list, iPod fresh, camera charged, laptop packed. 
Arrange for taxi.","id":89432112},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/48443/normal/jeff_barr.jpg?1171961668","screen_name":"jeffbarr"},{"name":"Paul Downey","description":"Computing Industry Bi-product","location":"Berkhamsted, UK","url":"http://blog.whatfettle.com","id":13486,"protected":false,"status":{"created_at":"Sun Jun 03 16:24:53 +0000 2007","text":"back from tea and cake at the village hall now resuming futile search for a family holiday on 'tinternet","id":89413652},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/13486/normal/psd-75x75.jpg?1171954493","screen_name":"psd"},{"name":"C Ortiz","description":"Amongst other things that keep me far away from a computer, I manage PrivateMilitary.org","location":"UK | US","url":"http://www.privatemilitary.org/","id":2306071,"protected":false,"status":{"created_at":"Sun Jun 03 16:10:44 +0000 2007","text":"Sunday reading: security contractors snatched without a shot: http://tinyurl.com/2qyv6x","id":89399902},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/2306071/normal/photo_twitter.jpg?1177775770","screen_name":"privatemilitary"},{"name":"steve o'grady","description":"analyst and co-founder of RedMonk","location":"Denver, CO","url":"http://redmonk.com/sogrady","id":143883,"protected":false,"status":{"created_at":"Sun Jun 03 15:57:16 +0000 2007","text":"ah, it's under \"Reply\"","id":89385112},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/143883/normal/headshot.jpg?1174273279","screen_name":"sogrady"},{"name":"Alexander J Turner","description":"Hubby, Dad & Chemist - Escaped Into IT Land To Cause Damage As A Software Architect!","location":"Oxford, UK","url":"http://www.nerds-central.com","id":1621891,"protected":false,"status":{"created_at":"Sun Jun 03 15:34:24 +0000 2007","text":"@chrisborgan - Just got back from a 20mile bike ride. You're right - it is all about habits. Thanks! 
less than 5 seconds ago from web","id":89364302},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/1621891/normal/MadAlex256.png?1178292687","screen_name":"AlexTurner"},{"name":"Josh Lucas","description":"Just adding another bit of distraction...","location":"Pasadena, CA","url":"http://www.stonecottage.com/josh/","id":47023,"protected":false,"status":{"created_at":"Sun Jun 03 15:16:47 +0000 2007","text":"wearing my new old-skool upcoming shirt","id":89345632},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/47023/normal/cubs_me.jpg?1171961535","screen_name":"lucasjosh"},{"name":"jark","description":"Co-Founder of deviantART","location":"Tokyo, Japan","url":"http://jarkolicious.com/","id":39653,"protected":false,"status":{"created_at":"Sun Jun 03 14:51:25 +0000 2007","text":"heads to bed, but first starts a process to burn 300 to DVD.","id":89318752},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/39653/normal/jark-static.jpg?1171960858","screen_name":"jark"},{"name":"Paul Terry Walhus","description":"developer building team for http://searchslides.com & http://web2.0slides.com","location":"Austin, Texas","url":"http://austinblogger.com/blog/","id":1418,"protected":false,"status":{"created_at":"Sun Jun 03 14:05:39 +0000 2007","text":"Geni - Everyone's Related: cool web 2.0 family tree app, requires login http://www.geni.com/tree","id":89277722},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/1418/normal/paulterrywalhus.jpg?1175716284","screen_name":"springnet"},{"name":"Floozle","description":"","location":"","url":null,"id":58863,"protected":false,"status":{"created_at":"Sun Jun 03 12:50:02 +0000 2007","text":"Off to the office to complete the conversion","id":89202702},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/58863/normal/29logo3-med.gif?1174609772","screen_name":"Floozle"},{"name":" T\u00c7","description":"Chem: Physics: Math: 
Logic: Observation: Analysis: Hypothesis: Experimentation: Iteration: Evolution: Science. It works, bitches.","location":"San Francisco, CA","url":"http://tantek.com/","id":11628,"protected":false,"status":{"created_at":"Sun Jun 03 10:34:10 +0000 2007","text":"pondering how 1 can b so tired & sore after exercise, yet so energized as 2 b up at 3:30am, w only *1* coffee instead of usual 3 per workday","id":89104612},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/11628/normal/icon200px.jpg?1171953743","screen_name":"t"},{"name":"adam","description":"http://ifindkarma.com/","location":"Palo Alto, CA","url":"http://renkoo.com/profile/ee0e95249268b86ff2053bef214bfeda","id":1688,"protected":false,"status":{"created_at":"Sun Jun 03 07:07:00 +0000 2007","text":"Norman Mailer on the idea of perfect happiness: \"A fool draws a road map to his magic city.\" (Vanity Fair, Jan 2007)","id":88952182},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/1688/normal/RifkinIcon.jpg?1179096764","screen_name":"ifindkarma"},{"name":"Cameron Walters","description":"getting satisfaction daily","location":"San Francisco","url":"http://chuddup.com/","id":3922,"protected":false,"status":{"created_at":"Sun Jun 03 06:54:14 +0000 2007","text":"Earlier: Pilates. Now: Spazzy thrashcore electro.","id":88940252},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/3922/normal/awesome.jpg?1177104664","screen_name":"ceedub"},{"name":"Jason Calacanis","description":"Looking for TNBT","location":"LA","url":"http://www.calacanis.com","id":3840,"protected":false,"status":{"created_at":"Sun Jun 03 06:49:36 +0000 2007","text":"Having dinner with loic, sam harris, jared diamond, and john brockman... 
Among others.","id":88935882},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/3840/normal/jason2.jpg?1172771113","screen_name":"JasonCalacanis"},{"name":"hober","description":"One-line bios are hard.","location":"San Diego, CA","url":"http://edward.oconnor.cx/","id":13607,"protected":false,"status":{"created_at":"Sun Jun 03 06:41:04 +0000 2007","text":"Emacs 22 released while I was at BarCamp. heh. http://www.gnu.org/software/emacs/","id":88929942},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/13607/normal/ted-icon-large.jpg?1171954556","screen_name":"hober"},{"name":" Jay","description":"Senior Systems Administrator of the Unix variety ","location":"Houston","url":null,"id":336113,"protected":false,"status":{"created_at":"Sun Jun 03 04:36:50 +0000 2007","text":"Wonders what kind of cheese Kristie spent $70 on.","id":88843542},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/336113/normal/MyPicture.jpg?1179799311","screen_name":"meangrape"},{"name":"Tatsuhiko Miyagawa","description":"Yet another Perl hacker","location":"San Francisco","url":"http://bulknews.vox.com/","id":731253,"protected":false,"status":{"created_at":"Sun Jun 03 04:06:57 +0000 2007","text":"had a good rice/veggie/pork noodle and shrimp fried rice in HoH. Feeling almost full and too ricey","id":88820042},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/731253/normal/P506iC0003735833.jpg?1170146286","screen_name":"miyagawa"},{"name":" Rod Begbie","description":"Cantankerous Scots git.","location":"Somerville, MA","url":"http://groovymother.com/","id":761,"protected":false,"status":{"created_at":"Sun Jun 03 03:43:42 +0000 2007","text":"Fun first day in LA. Looked at stars in sidewalk, got approached by Scientologists three times, Mexican dinner with old friends. 
Happy!","id":88800482},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/761/normal/vatar.png?1167697094","screen_name":"rodbegbie"},{"name":"Veronica","description":"CNET TV host and podcasting diva of Buzz Out Loud","location":"San Francisco","url":"http://www.veronicabelmont.com","id":10350,"protected":false,"status":{"created_at":"Sun Jun 03 03:11:10 +0000 2007","text":"i just saw kari byron! my hero!","id":88774212},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/10350/normal/mypictr_140x144.jpg?1179253942","screen_name":"Veronica"},{"name":"eric L","description":"mobile expert, internet idiot","location":"san francisco","url":"http://www.n1s.net","id":8291,"protected":false,"status":{"created_at":"Sun Jun 03 03:08:04 +0000 2007","text":"A howat once said there's no scorin with the sporin.","id":88771502},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/8291/normal/bandit.jpg?1165079078","screen_name":"n1s"},{"name":"Mr Messina","description":"As if concentrating wasn't hard enough already.","location":"94107","url":"http://factoryjoe.com/","id":1186,"protected":false,"status":{"created_at":"Sun Jun 03 01:42:47 +0000 2007","text":"OpenID won the disruptor award at The NextWeb conference! http://tinyurl.com/yq8e89","id":88706172},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/1186/normal/devil_150.jpg?1171953828","screen_name":"factoryjoe"},{"name":"Scobleizer","description":"Tech geek blogger @ http://scobleizer.com","location":"Half Moon Bay, California, USA","url":"http://scobleshow.com","id":13348,"protected":false,"status":{"created_at":"Sun Jun 03 01:06:52 +0000 2007","text":"I can't get into my Flickr account. I can get into Yahoo, but not Flickr... 
http://tinyurl.com/2b63rh","id":88677782},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/13348/normal/trafficlight.jpg?1175390038","screen_name":"Scobleizer"},{"name":"Andy Edmonds","description":null,"location":null,"url":null,"id":936361,"protected":false,"status":{"created_at":"Sat Jun 02 22:09:55 +0000 2007","text":"Tries out squidoo at http://www.squidoo.com/eyetrack","id":88551872},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/936361/normal/andy_headshot_48x48.png?1176350570","screen_name":"andyed"},{"name":"Ian McKellar","description":"","location":"San Francisco, CA","url":"http://ian.mckellar.org/","id":259,"protected":false,"status":{"created_at":"Sat Jun 02 20:55:30 +0000 2007","text":"lolfeeds got shut down for using too much cpu so I had to get around to adding a caching layer. it fixed some character encoding issues too!","id":88496342},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/259/normal/trapped.jpg?1171957894","screen_name":"ianmckellar"},{"name":"Jeremy Zawodny","description":"I fly and geek.","location":"San Jose, CA","url":"http://jeremy.zawodny.com/blog/","id":97933,"protected":false,"status":{"created_at":"Sat Jun 02 20:04:53 +0000 2007","text":"errands and packing for a week in the desert next week...","id":88458352},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/97933/normal/Zawodny-md.jpg?1166680073","screen_name":"jzawodn"},{"name":"Mihai","description":"","location":"New York, NY","url":"http://persistent.info/","id":28203,"protected":false,"status":{"created_at":"Sat Jun 02 19:46:47 +0000 2007","text":"Back on campus for reunions. Don't feel old just yet. 
Phew.","id":88444532},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/28203/normal/mihaip.jpg?1171958069","screen_name":"mihai"},{"name":"Robert Merrill","description":"Helping People & Teams Become Better","location":"Provo, Utah","url":"http://www.utahtechjobs.com","id":755721,"protected":false,"status":{"created_at":"Sat Jun 02 17:16:45 +0000 2007","text":"Heading home","id":88316782},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/755721/normal/50998991_N00.jpg?1170908379","screen_name":"robertmerrill"},{"name":"Darren Kulp","description":"Computer, linguistics, and music geek. What else is there to say?","location":"Eau Claire, WI, USA","url":"http://kulp.ch/","id":6083072,"protected":false,"status":{"created_at":"Sat Jun 02 15:39:55 +0000 2007","text":"twitterim finally works? Fancy that.","id":88225602},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/6083072/normal/f1.jpg?1179303021","screen_name":"kulp"},{"name":"sean coon","description":"trying to make a living and a difference...","location":"Greensboro, NC","url":"http://www.seancoon.org","id":677903,"protected":false,"status":{"created_at":"Sat Jun 02 08:27:03 +0000 2007","text":"Holy moly. Time to go fishing. Yeeeaaahhh!","id":87876222},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/677903/normal/sean-85.png?1173252480","screen_name":"spcoon"},{"name":"Niall","description":"Squeezing the most out of everything but my phone","location":"San Francisco, CA","url":"http://www.niallkennedy.com/","id":1085,"protected":true,"status":{"created_at":"Sat Jun 02 04:32:57 +0000 2007","text":"your server migration is at 11. oh, we changed our minds, we're doing it at 9:30. 
ummm....yay?","id":87696902},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/1085/normal/niall_ringer.jpg?1171953434","screen_name":"niall"},{"name":"George P","description":"i am illicium","location":"Bay Area","url":null,"id":796724,"protected":true,"status":{"created_at":"Sat Jun 02 04:07:24 +0000 2007","text":"Hello, Twitter! Long time no see.","id":87673952},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/796724/normal/star.gif?1172880544","screen_name":"illicium"},{"name":"Daniel E. Renfer","description":"Freelance Individual","location":"Ypsilanti, MI","url":"http://kronkltd.net/","id":11491,"protected":false,"status":{"created_at":"Fri Jun 01 21:15:50 +0000 2007","text":"In the immortal words of Doug from MTV's The State: \"I'm outta heere\"","id":87306192},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/11491/normal/886.jpg?1171953691","screen_name":"duck1123"},{"name":"Simon Willison","description":null,"location":"London","url":"http://simonwillison.net/","id":12497,"protected":false,"status":{"created_at":"Fri Jun 01 21:01:59 +0000 2007","text":"Nat and I are in Brighton this weekend, anyone want to meet up?","id":87291372},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/12497/normal/298777290_a5ed9a4e70_m.jpg?1171954113","screen_name":"simonw"},{"name":"Yoz","description":"A small yoz-type object, currently residing in San Francisco","location":"San Francisco, CA","url":"http://yoz.com/","id":12329,"protected":false,"status":{"created_at":"Fri Jun 01 18:13:23 +0000 2007","text":"@riffraff814: Thanks but no thanks, don't drink coffee, my body is a temple filled with magical prancing ponies yay peanut M&M overdose","id":87132942},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/12329/normal/yozlap-100.jpg?1171954052","screen_name":"yoz"},{"name":"J Scud","description":"Software can be beautiful.","location":"Silicon Valley 
California","url":"http://jeffreyscudder.blogspot.com/","id":1359571,"protected":false,"status":{"created_at":"Fri Jun 01 07:14:58 +0000 2007","text":"Watching videos from Google Developer Day: http://tinyurl.com/ywr8cz","id":86425072},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/1359571/normal/temp_img.jpg?1177211826","screen_name":"jaguarjaws"},{"name":"Joe Duck","description":"Travel Internet Oregon Guy","location":"Oregon","url":"http://joeduck.wordpress.com","id":150433,"protected":false,"status":{"created_at":"Fri Jun 01 07:09:02 +0000 2007","text":"Removed Google downranking on an obscure travel page. Next test - non-obscure page fixes.http://tinyurl.com/227re8","id":86418302},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/150433/normal/joebiopic.jpg?1173598016","screen_name":"joeduck"},{"name":"Jon Phillips","description":"http://rejon.org/bio/","location":"SF","url":"http://rejon.org","id":744063,"protected":false,"status":{"created_at":"Fri Jun 01 04:59:45 +0000 2007","text":"Killing it...","id":86303992},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/744063/normal/jon_by_ryan_junell_fixed.png?1170353692","screen_name":"rejon"},{"name":"The BFF","description":"","location":"Netherlands","url":"http://doncrowley.blogspot.com/","id":1620121,"protected":false,"status":{"created_at":"Thu May 31 21:30:08 +0000 2007","text":"Twitteriffic addicts - some power tips: I am amazed at how many still do not know these tricks with twitte.. http://tinyurl.com/2mzjod","id":85901962},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/1620121/normal/bff.jpg?1174419110","screen_name":"TheBFF"},{"name":"Andy Armstrong","description":"Perl Bloke","location":"Cumbria, UK","url":"http://hexten.net","id":1022831,"protected":false,"status":{"created_at":"Thu May 31 18:58:03 +0000 2007","text":"37 downloads pending to iTMS. Keeps timing out. Bah. 
Anyone else?","id":85769802},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/1022831/normal/moi.jpg?1176741181","screen_name":"AndyArmstrong"},{"name":"Brian Suda","description":"SWM Informatician @64.132511;-21.906494 (microformats,GRDDL,XSLT,PHP,picoformats,XHTML)","location":"Iceland","url":"http://suda.co.uk/","id":15313,"protected":false,"status":{"created_at":"Thu May 31 17:54:18 +0000 2007","text":"is happy that tonight is the last night that you can smoke in bars and restaurants in Iceland. Washing machines won't be happy with this.","id":85708142},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/15313/normal/gravatar.png?1175011623","screen_name":"briansuda"},{"name":"arunaurl","description":"Make long URLs small!","location":"All over the web","url":"http://arunaurl.com","id":6371812,"protected":false,"status":{"created_at":"Tue May 29 23:49:15 +0000 2007","text":"Aruna URL now available as a Zimbra Collaboration Suite plug-in / zimlet http://arunaurl.com/b25","id":83248192},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/6371812/normal/Aruna_URL_logo.png?1180314185","screen_name":"arunaurl"},{"name":"Hiroshi Ayukawa","description":"A Japanese ordinary Python & C++ programmer","location":"Tokyo, Japan","url":null,"id":3786561,"protected":false,"status":{"created_at":"Tue May 29 16:22:31 +0000 2007","text":"I don't like to use JAVA any more... Let me sleep, Ahhhhh! Give me Python.","id":82802982},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/3786561/normal/muku.jpg?1176037335","screen_name":"hiroshiykw"},{"name":" Scriptless Day","description":"","location":"","url":"http://www.scriptlessday.com","id":3641801,"protected":false,"status":{"created_at":"Mon May 28 07:55:27 +0000 2007","text":"39 Days Left till Scriptless Day!! 
http://www.scriptlessday.com :D","id":81305782},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/3641801/normal/splash2.png?1175927287","screen_name":"scriptlessday"},{"name":"brady forrest","description":null,"location":null,"url":null,"id":6140,"protected":false,"status":{"created_at":"Sat May 26 02:51:53 +0000 2007","text":"im at convergence 13 in pdx. looking for events inspiration.","id":78751852},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/6140/normal/IMG_9629.jpg?1175295805","screen_name":"brady"},{"name":"TV with MeeVee","description":"We heart TV","location":"Burlingame, CA","url":"http://blog.meevee.com","id":2371741,"protected":false,"status":{"created_at":"Wed May 23 17:22:20 +0000 2007","text":"Nicole Richie Denies Rehab Rumors http://tinyurl.com/2hvn8h","id":75514922},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/2371741/normal/meevee_potato_ad3SoSF.jpg?1174950717","screen_name":"TVwithMeeVee"},{"name":"Emmet Connolly","description":"I have nothing to say (and I am saying it)","location":"Dublin","url":"http://blog.thoughtwax.com/","id":11323,"protected":false,"status":{"created_at":"Tue May 22 21:34:28 +0000 2007","text":"Virtual subdomains + .htaccess = ow, my brain. Anyway, three years late but my blog now has pretty urls. 
Hooray!","id":74472602},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/11323/normal/me.jpg?1171953619","screen_name":"thoughtwax"},{"name":"Bilal Hameed","description":"Editor of Startup Meme","location":"As if it matters","url":"http://startupmeme.com","id":5629992,"protected":false,"status":{"created_at":"Tue May 15 16:41:05 +0000 2007","text":"http://tinyurl.com/2h474q Use EasyPost To Send Snail Mail in Canada","id":65079402},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/5629992/normal/me.jpg?1177848235","screen_name":"startupmeme"},{"name":"Koen Sadza","description":"I study Applied Physics. My hobbies are my GF,SInging,Scouting,Clubbing,Computers","location":"Eindhoven, The Netherlands","url":"http://weblog.ksdz.nl/","id":803238,"protected":false,"status":{"created_at":"Sat May 12 14:16:37 +0000 2007","text":"Net terug van Scouting","id":61483292},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/803238/normal/kerst.jpg?1172746148","screen_name":"ksdz"},{"name":"Webtickle","description":"","location":"California","url":null,"id":1322691,"protected":false,"status":{"created_at":"Thu May 10 03:25:15 +0000 2007","text":"The Freelancer\u2019s Toolset: 100 Web Apps for Everything You Will Possibly Need\" http://tinyurl.com/2dztm8","id":58099692},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/1322691/normal/large4197.png?1174089345","screen_name":"Webtickle"},{"name":"Yes","description":null,"location":null,"url":null,"id":765884,"protected":false,"status":{"created_at":"Wed May 09 20:43:39 +0000 2007","text":"Whoo hoo! 
Yay!","id":57715832},"profile_image_url":"http://assets1.twitter.com/images/default_image.gif?1180755379","screen_name":"Yes"},{"name":"Leland Harding III","description":"Traditional country artist, from South Dakota with a major background in the country music scene","location":"South Dakota","url":"http://www.lelandharding.com","id":3545551,"protected":false,"status":{"created_at":"Tue May 08 08:01:31 +0000 2007","text":"Anyone who has been scammed by these people can get ahold of me at [email protected]","id":54614912},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/3545551/normal/image01-xml.jpg?1175797244","screen_name":"ldcountry"},{"name":"Trendio M","description":"","location":"","url":"http://www.trendio.com","id":5845532,"protected":false,"status":{"created_at":"Mon May 07 22:32:39 +0000 2007","text":"Open Source and Linux are up today. Which will be this week's top technology trends? http://tinyurl.com/2bpknb","id":53868212},"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/5845532/normal/logo48x48.gif?1178576662","screen_name":"Trendio_M"},{"name":"Patrick Wang","description":"","location":"San Francisco, CA","url":"http://junesix.org","id":799123,"protected":false,"status":{"created_at":"Mon Apr 16 21:57:48 +0000 2007","text":"@jabancroft optimized builds at beatnikpad.com","id":30322371},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/799123/normal/samurai_head.gif?1173313806","screen_name":"junesix"},{"name":"Dion Almaer","description":"ajaxian, googley, and techno","location":"Palo Alto, CA","url":"http://almaer.com/blog","id":4216361,"protected":false,"status":{"created_at":"Wed Apr 11 17:48:16 +0000 2007","text":"Enjoying being able to talk about the Google Developer Day!","id":24861261},"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/4216361/normal/logo-48x48.jpg?1176313520","screen_name":"dalmaer"},{"name":"Chris 
DiBona","description":null,"location":null,"url":null,"id":44423,"protected":true,"status":{"created_at":"Fri Mar 16 16:42:55 +0000 2007","text":"Cripes, trapped on the tarmac at iad. Today, we sit in hell:","id":8638201},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/44423/normal/103ID_DiBona.jpg?1171961329","screen_name":"cdibona"},{"name":"napoleone","description":"","location":"piacenza ITALY","url":null,"id":740133,"protected":false,"status":{"created_at":"Sat Feb 17 14:36:53 +0000 2007","text":"sono indeciso tra casual o un p\u00f2 sportive!!!","id":5555694},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/740133/normal/P6050088.jpg?1170800580","screen_name":"Cris"},{"name":"Brian Tucker","description":null,"location":null,"url":null,"id":759328,"protected":false,"status":{"created_at":"Sun Feb 11 04:11:11 +0000 2007","text":"Chinese new year starts next weekend I think.","id":5422032},"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/759328/normal/bt.png?1170987088","screen_name":"brian318"},{"name":"Josh H","description":null,"location":null,"url":null,"id":2831771,"protected":false,"profile_image_url":"http://assets1.twitter.com/images/default_image.gif?1180755379","screen_name":"josh59x"},{"name":"mfagan","description":"","location":"Canada","url":"http://faganm.com/","id":677403,"protected":false,"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/677403/normal/me_with_hat.jpg?1171966071","screen_name":"mfagan"}] |
1
testdata/friends.json
| @@ -0,0 +1 @@ | ||
| +{"users":[{"name":" T\u00c7","description":"Chem: Physics: Math: Logic: Observation: Analysis: Hypothesis: Experimentation: Iteration: Evolution: Science. It works, bitches.","location":"San Francisco, CA","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/11628/normal/icon200px.jpg?1171953743","url":"http://tantek.com/","id":11628,"screen_name":"t","protected":false,"status":{"created_at":"Sun Jun 03 10:34:10 +0000 2007","text":"pondering how 1 can b so tired & sore after exercise, yet so energized as 2 b up at 3:30am, w only *1* coffee instead of usual 3 per workday","id":89104612}},{"name":"adam","description":"http://ifindkarma.com/","location":"Palo Alto, CA","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/1688/normal/RifkinIcon.jpg?1179096764","url":"http://renkoo.com/profile/ee0e95249268b86ff2053bef214bfeda","id":1688,"screen_name":"ifindkarma","protected":false,"status":{"created_at":"Sun Jun 03 07:07:00 +0000 2007","text":"Norman Mailer on the idea of perfect happiness: \"A fool draws a road map to his magic city.\" (Vanity Fair, Jan 2007)","id":88952182}},{"name":"Alex King","description":"","location":"Denver, CO","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/101143/normal/alex_king.jpg?1171953145","url":"http://alexking.org","id":101143,"screen_name":"alexkingorg","protected":false,"status":{"created_at":"Sun Jun 03 19:06:54 +0000 2007","text":"@andrewhyde Coverage was poor enough in the area to drive me back to Sprint.","id":89554432}},{"name":"Andy Edmonds","description":null,"location":null,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/936361/normal/andy_headshot_48x48.png?1176350570","url":null,"id":936361,"screen_name":"andyed","protected":false,"status":{"created_at":"Sat Jun 02 22:09:55 +0000 2007","text":"Tries out squidoo at http://www.squidoo.com/eyetrack","id":88551872}},{"name":"anildash","description":"That blogging 
guy.","location":"New York, New York","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/36823/normal/6315878.jpg?1171960613","url":"http://www.anildash.com/","id":36823,"screen_name":"anildash","protected":false,"status":{"created_at":"Fri Jun 01 20:33:42 +0000 2007","text":"I AM BURNING MY FEED IN PROTEST OF THOSE BASTARDS SELLING OUT. WHO'S WITH ME?!","id":87263942}},{"name":"Biz Stone","description":"I work here!","location":"Berkeley, CA","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/13/normal/biz_toon.png?1171954299","url":"http://bizstone.com","id":13,"screen_name":"biz","protected":false,"status":{"created_at":"Sun Jun 03 02:36:51 +0000 2007","text":"Gmail down to 8 while Livy makes curried carrot soup and of course Star Trek Voyager is on marathon mode","id":88746562}},{"name":"brady forrest","description":null,"location":null,"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/6140/normal/IMG_9629.jpg?1175295805","url":null,"id":6140,"screen_name":"brady","protected":false,"status":{"created_at":"Sat May 26 02:51:53 +0000 2007","text":"im at convergence 13 in pdx. looking for events inspiration.","id":78751852}},{"name":"Brian Suda","description":"SWM Informatician @64.132511;-21.906494 (microformats,GRDDL,XSLT,PHP,picoformats,XHTML)","location":"Iceland","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/15313/normal/gravatar.png?1175011623","url":"http://suda.co.uk/","id":15313,"screen_name":"briansuda","protected":false,"status":{"created_at":"Thu May 31 17:54:18 +0000 2007","text":"is happy that tonight is the last night that you can smoke in bars and restaurants in Iceland. 
Washing machines won't be happy with this.","id":85708142}},{"name":"Buzz Andersen","description":null,"location":"San Francisco, CA","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/528/normal/buzz-jacket-tiny.jpg?1171962059","url":"http://buzz.vox.com","id":528,"screen_name":"buzz","protected":false,"status":{"created_at":"Sun Jun 03 18:56:37 +0000 2007","text":"At Madison Square Park, waiting to meet my Shake Shack compatriots.","id":89543882}},{"name":"Case","description":null,"location":"San Francisco, CA, USA","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/409/normal/me-skype.jpg?1177561455","url":"http://vedana.net/","id":409,"screen_name":"Case","protected":true,"status":{"created_at":"Sun Jun 03 18:25:46 +0000 2007","text":"still buzzing from the arcade fire show, listening to neon bible and dancing around in delight!","id":89518902}},{"name":"Chris DiBona","description":null,"location":null,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/44423/normal/103ID_DiBona.jpg?1171961329","url":null,"id":44423,"screen_name":"cdibona","protected":true,"status":{"created_at":"Fri Mar 16 16:42:55 +0000 2007","text":"Cripes, trapped on the tarmac at iad. 
Today, we sit in hell:","id":8638201}},{"name":"Dave McClure","description":"Master of 500 Hats","location":"silicon valley, sf bay area","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/1081/normal/guido3.jpg?1171953420","url":"http://500hats.typepad.com","id":1081,"screen_name":"davemc500hats","protected":false,"status":{"created_at":"Fri Jun 01 09:35:11 +0000 2007","text":"just blogged about Sacks, Facebook, Wisdom of Crowds http://tinyurl.com/28v44h","id":86555632}},{"name":"Dion Almaer","description":"ajaxian, googley, and techno","location":"Palo Alto, CA","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/4216361/normal/logo-48x48.jpg?1176313520","url":"http://almaer.com/blog","id":4216361,"screen_name":"dalmaer","protected":false,"status":{"created_at":"Wed Apr 11 17:48:16 +0000 2007","text":"Enjoying being able to talk about the Google Developer Day!","id":24861261}},{"name":"eric L","description":"mobile expert, internet idiot","location":"san francisco","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/8291/normal/bandit.jpg?1165079078","url":"http://www.n1s.net","id":8291,"screen_name":"n1s","protected":false,"status":{"created_at":"Sun Jun 03 03:08:04 +0000 2007","text":"A howat once said there's no scorin with the sporin.","id":88771502}},{"name":"Evan Williams","description":"Founder of Obvious ","location":"San Francisco, CA","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/20/normal/ev-sky.jpg?1175282926","url":"http://evhead.com","id":20,"screen_name":"ev","protected":false,"status":{"created_at":"Sun Jun 03 07:04:50 +0000 2007","text":"This bathroom has an overload of marble","id":88950312}},{"name":"Greg Stein","description":null,"location":null,"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/12449/normal/gstein-by-pdcawley-cropped.jpg?1171954097","url":null,"id":12449,"screen_name":"gstein","protected":false},{"name":"Ian 
McKellar","description":"","location":"San Francisco, CA","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/259/normal/trapped.jpg?1171957894","url":"http://ian.mckellar.org/","id":259,"screen_name":"ianmckellar","protected":false,"status":{"created_at":"Sat Jun 02 20:55:30 +0000 2007","text":"lolfeeds got shut down for using too much cpu so I had to get around to adding a caching layer. it fixed some character encoding issues too!","id":88496342}},{"name":"jark","description":"Co-Founder of deviantART","location":"Tokyo, Japan","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/39653/normal/jark-static.jpg?1171960858","url":"http://jarkolicious.com/","id":39653,"screen_name":"jark","protected":false,"status":{"created_at":"Sun Jun 03 14:51:25 +0000 2007","text":"heads to bed, but first starts a process to burn 300 to DVD.","id":89318752}},{"name":"Jeff Barr","description":"Amazon Web Services Evangelist, Blogger, Father of 5.","location":"Sammamish, Washington, USA","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/48443/normal/jeff_barr.jpg?1171961668","url":"http://www.jeff-barr.com","id":48443,"screen_name":"jeffbarr","protected":false,"status":{"created_at":"Sun Jun 03 16:43:47 +0000 2007","text":"Preparing for trip to DC tomorrow AM - lots of reading materials, TODO list, iPod fresh, camera charged, laptop packed. 
Arrange for taxi.","id":89432112}},{"name":"Jeremy Zawodny","description":"I fly and geek.","location":"San Jose, CA","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/97933/normal/Zawodny-md.jpg?1166680073","url":"http://jeremy.zawodny.com/blog/","id":97933,"screen_name":"jzawodn","protected":false,"status":{"created_at":"Sat Jun 02 20:04:53 +0000 2007","text":"errands and packing for a week in the desert next week...","id":88458352}},{"name":"John Gruber","description":"Raconteur.","location":"Philadelphia","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/33423/normal/gruber-wanamaker-monorail.jpg?1171960346","url":"http://daringfireball.net","id":33423,"screen_name":"gruber","protected":false,"status":{"created_at":"Sun Jun 03 14:51:07 +0000 2007","text":"Needless to say, it was great time.","id":89318502}},{"name":"Josh H","description":null,"location":null,"profile_image_url":"http://assets1.twitter.com/images/default_image.gif?1180755379","url":null,"id":2831771,"screen_name":"josh59x","protected":false},{"name":"Josh Lucas","description":"Just adding another bit of distraction...","location":"Pasadena, CA","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/47023/normal/cubs_me.jpg?1171961535","url":"http://www.stonecottage.com/josh/","id":47023,"screen_name":"lucasjosh","protected":false,"status":{"created_at":"Sun Jun 03 15:16:47 +0000 2007","text":"wearing my new old-skool upcoming shirt","id":89345632}},{"name":"Kesuke Miyagi","description":"\u79c1\u306e\u30db\u30d0\u30fc\u30af\u30e9\u30d5\u30c8 \u306f\u9c3b\u304c\u4e00\u676f\u3067\u3059\u3002","location":"Okinawa, Japan","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/718443/normal/kesuke.png?1169966399","url":null,"id":718443,"screen_name":"kesuke","protected":false,"status":{"created_at":"Sun Jun 03 18:15:29 +0000 2007","text":"\u041c\u043e\u0451 \u0441\u0443\u0434\u043d\u043e \u043d\u0430 
\u0432\u043e\u0437\u0434\u0443\u0448\u043d\u043e\u0439 \u043f\u043e\u0434\u0443\u0448\u043a\u0435 \u043f\u043e\u043b\u043d\u043e \u0443\u0433\u0440\u0435\u0439","id":89512102}},{"name":"Kevin Burton","description":null,"location":null,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/62763/normal/me-profile.jpg?1173387685","url":null,"id":62763,"screen_name":"burtonator","protected":false,"status":{"created_at":"Sun Jun 03 08:43:41 +0000 2007","text":"zoe is playing with a plastic ring from a milk bottle.... cheap toy!","id":89021982}},{"name":"mfagan","description":"","location":"Canada","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/677403/normal/me_with_hat.jpg?1171966071","url":"http://faganm.com/","id":677403,"screen_name":"mfagan","protected":false},{"name":"Mihai","description":"","location":"New York, NY","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/28203/normal/mihaip.jpg?1171958069","url":"http://persistent.info/","id":28203,"screen_name":"mihai","protected":false,"status":{"created_at":"Sat Jun 02 19:46:47 +0000 2007","text":"Back on campus for reunions. Don't feel old just yet. Phew.","id":88444532}},{"name":"Mr Messina","description":"As if concentrating wasn't hard enough already.","location":"94107","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/1186/normal/devil_150.jpg?1171953828","url":"http://factoryjoe.com/","id":1186,"screen_name":"factoryjoe","protected":false,"status":{"created_at":"Sun Jun 03 01:42:47 +0000 2007","text":"OpenID won the disruptor award at The NextWeb conference! 
http://tinyurl.com/yq8e89","id":88706172}},{"name":"Niall","description":"Squeezing the most out of everything but my phone","location":"San Francisco, CA","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/1085/normal/niall_ringer.jpg?1171953434","url":"http://www.niallkennedy.com/","id":1085,"screen_name":"niall","protected":true,"status":{"created_at":"Sat Jun 02 04:32:57 +0000 2007","text":"your server migration is at 11. oh, we changed our minds, we're doing it at 9:30. ummm....yay?","id":87696902}},{"name":"Nick Douglas","description":"I am clever. Are you clever too?","location":"San Francisco","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/1084/normal/Nick-tiny-face.jpg?1171953430","url":"http://lookshiny.com","id":1084,"screen_name":"nick","protected":false,"status":{"created_at":"Sun Jun 03 07:52:33 +0000 2007","text":"Even Mark Day was at Arcade Fire, and I conned myself out of going with a cute friend 'cuz I'd told her how much I hate the band.","id":88984072}},{"name":"Paul Downey","description":"Computing Industry Bi-product","location":"Berkhamsted, UK","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/13486/normal/psd-75x75.jpg?1171954493","url":"http://blog.whatfettle.com","id":13486,"screen_name":"psd","protected":false,"status":{"created_at":"Sun Jun 03 16:24:53 +0000 2007","text":"back from tea and cake at the village hall now resuming futile search for a family holiday on 'tinternet","id":89413652}},{"name":"sean coon","description":"trying to make a living and a difference...","location":"Greensboro, NC","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/677903/normal/sean-85.png?1173252480","url":"http://www.seancoon.org","id":677903,"screen_name":"spcoon","protected":false,"status":{"created_at":"Sat Jun 02 08:27:03 +0000 2007","text":"Holy moly. Time to go fishing. 
Yeeeaaahhh!","id":87876222}},{"name":"Simon Willison","description":null,"location":"London","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/12497/normal/298777290_a5ed9a4e70_m.jpg?1171954113","url":"http://simonwillison.net/","id":12497,"screen_name":"simonw","protected":false,"status":{"created_at":"Fri Jun 01 21:01:59 +0000 2007","text":"Nat and I are in Brighton this weekend, anyone want to meet up?","id":87291372}},{"name":"steve o'grady","description":"analyst and co-founder of RedMonk","location":"Denver, CO","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/143883/normal/headshot.jpg?1174273279","url":"http://redmonk.com/sogrady","id":143883,"screen_name":"sogrady","protected":false,"status":{"created_at":"Sun Jun 03 15:57:16 +0000 2007","text":"ah, it's under \"Reply\"","id":89385112}},{"name":"Tatsuhiko Miyagawa","description":"Yet another Perl hacker","location":"San Francisco","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/731253/normal/P506iC0003735833.jpg?1170146286","url":"http://bulknews.vox.com/","id":731253,"screen_name":"miyagawa","protected":false,"status":{"created_at":"Sun Jun 03 04:06:57 +0000 2007","text":"had a good rice/veggie/pork noodle and shrimp fried rice in HoH. Feeling almost full and too ricey","id":88820042}},{"name":"Tom Coates","description":"Scruffy, grumpy, social mediaesque...","location":"London","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/12514/normal/12037949715_N01.jpg?1178111032","url":"http://www.plasticbag.org/","id":12514,"screen_name":"plasticbagUK","protected":false,"status":{"created_at":"Sun Jun 03 18:21:20 +0000 2007","text":"My heart sinks as I pass Hackney Down.","id":89515562}},{"name":"veen","description":"I used to make small things. 
Now I make big things.","location":"San Francisco","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/414/normal/Photo_21.jpg?1171961099","url":"http://veen.com/jeff/","id":414,"screen_name":"veen","protected":false,"status":{"created_at":"Fri Jun 01 22:38:36 +0000 2007","text":"Received an absolutely beautiful wedding invitation, made even better by an Obi Wan Kenobi postage stamp.","id":87377882}},{"name":"Veronica","description":"CNET TV host and podcasting diva of Buzz Out Loud","location":"San Francisco","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/10350/normal/mypictr_140x144.jpg?1179253942","url":"http://www.veronicabelmont.com","id":10350,"screen_name":"Veronica","protected":false,"status":{"created_at":"Sun Jun 03 03:11:10 +0000 2007","text":"i just saw kari byron! my hero!","id":88774212}},{"name":"Yes","description":null,"location":null,"profile_image_url":"http://assets1.twitter.com/images/default_image.gif?1180755379","url":null,"id":765884,"screen_name":"Yes","protected":false,"status":{"created_at":"Wed May 09 20:43:39 +0000 2007","text":"Whoo hoo! Yay!","id":57715832}},{"name":"Yoz","description":"A small yoz-type object, currently residing in San Francisco","location":"San Francisco, CA","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/12329/normal/yozlap-100.jpg?1171954052","url":"http://yoz.com/","id":12329,"screen_name":"yoz","protected":false,"status":{"created_at":"Fri Jun 01 18:13:23 +0000 2007","text":"@riffraff814: Thanks but no thanks, don't drink coffee, my body is a temple filled with magical prancing ponies yay peanut M&M overdose","id":87132942}}]} |
1
testdata/friends_timeline-kesuke.json
| @@ -0,0 +1 @@ | ||
| +[{"created_at":"Sun Jun 03 18:15:29 +0000 2007","text":"\u041c\u043e\u0451 \u0441\u0443\u0434\u043d\u043e \u043d\u0430 \u0432\u043e\u0437\u0434\u0443\u0448\u043d\u043e\u0439 \u043f\u043e\u0434\u0443\u0448\u043a\u0435 \u043f\u043e\u043b\u043d\u043e \u0443\u0433\u0440\u0435\u0439","id":89512102,"user":{"name":"Kesuke Miyagi","description":"\u79c1\u306e\u30db\u30d0\u30fc\u30af\u30e9\u30d5\u30c8 \u306f\u9c3b\u304c\u4e00\u676f\u3067\u3059\u3002","location":"Okinawa, Japan","url":null,"id":718443,"protected":false,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/718443/normal/kesuke.png?1169966399","screen_name":"kesuke"}},{"created_at":"Sat Jun 02 17:13:19 +0000 2007","text":"At WhereCamp on yahoo's campus. Great crowd.","id":88313822,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Fri Jun 01 19:17:36 +0000 2007","text":"Yay Feedburner.","id":87194962,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Wed May 09 19:47:06 +0000 2007","text":"Just ordered a Wii from Amazon. 
Thanks to Greg!","id":57662052,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Fri Apr 27 21:01:56 +0000 2007","text":"\u79c1\u306e\u30db\u30d0\u30fc\u30af\u30e9\u30d5\u30c8 \u306f\u9c3b\u304c\u4e00\u676f\u3067\u3059\u3002(No really, it is.)","id":42342562,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Fri Apr 27 19:02:43 +0000 2007","text":"Whoops. My python-twitter library can't handle utf8. On the upside, sending random twitters in Hungarian gets people to un-follow me...","id":42237742,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Fri Apr 27 15:34:12 +0000 2007","text":"A l\u00e9gp\u00e1rn\u00e1s haj\u00f3m tele van angoln\u00e1kkal.","id":41984622,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Mon Apr 23 03:45:20 +0000 2007","text":"And that, my friends, is baseball at its finest.","id":36527662,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, 
CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Mon Apr 02 15:32:55 +0000 2007","text":"Fixed a python-twitter bug that returned broken relative_created_at in some cases. New version at: http://code.google.com/p/python-twitter/","id":17729991,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Fri Mar 30 02:46:53 +0000 2007","text":"Just got home, and hoping that Buzz's office hours are enough to get me to go out again tonight.","id":15587961,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Mon Mar 12 15:45:01 +0000 2007","text":"The latest !!! album is making me happy this morning.","id":7015091,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Wed Mar 07 03:25:59 +0000 2007","text":"Drove home to Arcade Fire's Neon Bible. Flawed around the middle, it is still the best thing I've heard this year. 
I really needed this.","id":5895171,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Mon Mar 05 16:15:57 +0000 2007","text":"Just updated all my Linux boxes to handle the changes to daylight savings time. Have you? Write me if you need a hand.","id":5854199,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Fri Mar 02 04:47:39 +0000 2007","text":"Yay! My first earthquake. Holy crap! They're kinda scary, aren't they?","id":5787870,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Thu Mar 01 04:36:14 +0000 2007","text":"Headed to Alembic early for Buzz's office hours.","id":5765262,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Wed Feb 28 22:57:54 +0000 2007","text":"Are the Twitter APIs a tad temperamental today? 
","id":5760759,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Sat Feb 24 03:52:16 +0000 2007","text":"On route to Mad Dog in the Fog to see Brendan and Corrie. 3 years in the Bay Area and I still haven't felt a quake.","id":5671516,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Sat Feb 17 16:10:12 +0000 2007","text":"Just preordered Modest Mouse _We Were Dead Before the Ship Even Sank_ and Arcade Fire _Neon Bible_. I need to hibernate until March now.","id":5556806,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Wed Feb 14 22:59:09 +0000 2007","text":"Uh-oh. My IM away message just got Twittered. 
That's not a good thing.","id":5510368,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}},{"created_at":"Wed Feb 14 22:39:25 +0000 2007","text":"In meetings","id":5510068,"user":{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"}}] |
1
testdata/friendship-create.json
| @@ -0,0 +1 @@ | ||
| +{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"status":{"created_at":"Sun Jun 03 19:50:23 +0000 2007","text":"If a theme song played when I walked around all day, I'd want it to be All My Friends by LCD Soundsystem.","id":89586072},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"} |
1
testdata/friendship-destroy.json
| @@ -0,0 +1 @@ | ||
| +{"name":"DeWitt","description":"Indeterminate things","location":"San Francisco, CA","url":"http://unto.net/","id":673483,"protected":false,"status":{"created_at":"Sun Jun 03 19:50:23 +0000 2007","text":"If a theme song played when I walked around all day, I'd want it to be All My Friends by LCD Soundsystem.","id":89586072},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"} |
1
testdata/public_timeline.json
| @@ -0,0 +1 @@ | ||
| +[{"created_at":"Sun Jun 03 17:59:37 +0000 2007","text":"Warcry Blog: Anjas first Birthday: Today was Anjas first birthday, It is insane that a year .. http://tinyurl.com/2x7omb","id":89497702,"user":{"name":"Patrik Olterman","description":"","location":"Riga","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/2038581/normal/dsc03106.jpg?1180337431","url":"http://warcry.olterman.se","id":2038581,"screen_name":"olterman","protected":false}},{"created_at":"Sun Jun 03 18:01:01 +0000 2007","text":"3am AEST - AWAYE! (Listen Up) - http://tinyurl.com/38nuza","id":89497692,"user":{"name":"ABC Radio National","description":"","location":"Australia","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/3496461/normal/abc.jpg?1176239249","url":"http://abc.net.au/rn/","id":3496461,"screen_name":"abcrn","protected":false}},{"created_at":"Sun Jun 03 17:59:36 +0000 2007","text":"I am going to brush my teeth cuz i just woke up","id":89497682,"user":{"name":"alanna esposito","description":"","location":"","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/3467431/normal/legzz.jpg?1175725871","url":null,"id":3467431,"screen_name":"luhhsespino","protected":false}},{"created_at":"Sun Jun 03 18:00:58 +0000 2007","text":"Listening to an old Blur album ('13').","id":89497672,"user":{"name":"Lex The Hex","description":"I love anything geeky, love gadgets, find it very difficult to talk to people.","location":"North Devon, UK","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/429783/normal/IMAGE_099.jpg?1180813787","url":"http://brooknet.org","id":429783,"screen_name":"lexthehex","protected":false}},{"created_at":"Sun Jun 03 18:00:58 +0000 2007","text":"taking pics of myself 4 my facebook.","id":89497662,"user":{"name":"Lexi 
Jackson","description":"","location":"","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/6525642/normal/cute_dog.jpg?1180803976","url":null,"id":6525642,"screen_name":"LexiJackson","protected":false}},{"created_at":"Sun Jun 03 17:59:33 +0000 2007","text":"I am getting bored. Wish i could entertain myself. Solution any one?","id":89497632,"user":{"name":"Samuel Joos","description":null,"location":null,"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/6517892/normal/datbenik.jpg?1180766995","url":null,"id":6517892,"screen_name":"Flashingback","protected":false}},{"created_at":"Sun Jun 03 18:00:56 +0000 2007","text":"@definetheline; STFU!!!!!!","id":89497622,"user":{"name":"Alejandro [correa]","description":"I'm like quicksand. Sandy & quick.","location":"Miami","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/639343/normal/avatar.jpg?1180748145","url":"http://flickr.com/photos/alej744","id":639343,"screen_name":"alej744","protected":false}},{"created_at":"Sun Jun 03 18:00:53 +0000 2007","text":"Haciendo monstruos","id":89497582,"user":{"name":"Jimena Vega","description":"Vean mis Monstruos: www.monstersncutties.wordpress.com","location":"M\u00e8xico","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/761245/normal/511658650_2c1e59ce98_t.jpg?1179977645","url":"http://www.shamballa.fulguris.net","id":761245,"screen_name":"shamballa","protected":false}},{"created_at":"Sun Jun 03 17:59:29 +0000 2007","text":"heading to chinatown for roast pork n duck n bubble tea","id":89497572,"user":{"name":"Corinne","description":"","location":"","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/815054/normal/oops.jpg?1173239376","url":"http://25cents.wordpress.com","id":815054,"screen_name":"xcori","protected":false}},{"created_at":"Sun Jun 03 17:59:29 +0000 2007","text":"I'm actually really enjoying Facebook. The calendars and stuff are very organizational. 
I'm an organization freak! Perfect for me!","id":89497562,"user":{"name":"The Mighty Mommy","description":"Mommy of Two, Wife of One, Friend to Many!","location":"Arizona","profile_image_url":"http://assets2.twitter.com/system/user/profile_image/772523/normal/miimage.jpg?1179497687","url":"http://mightymommy.qdnow.com","id":772523,"screen_name":"MightyMommy","protected":false}},{"created_at":"Sun Jun 03 18:00:52 +0000 2007","text":"Off for tonight, see you tomorrow.","id":89497552,"user":{"name":"Benjamin Gauthey","description":"","location":"","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/3194221/normal/me.jpg?1179996565","url":"http://www.benjamingauthey.com","id":3194221,"screen_name":"benjamingauthey","protected":false}},{"created_at":"Sun Jun 03 18:00:52 +0000 2007","text":"[Blog Updated] Weewar - \u5c0f\u578b\u5728\u7ebf\u5373\u65f6\u6218\u7565\u6e38\u620f http://tinyurl.com/33yfa2","id":89497532,"user":{"name":"Lyang","description":"\"If we lose, then what the hell, at least we died trying...\" Digg On!","location":"Shanghai.CN","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/5144201/normal/elusive.png?1178901205","url":"http://a7xorz.blogspot.com","id":5144201,"screen_name":"a7xorz","protected":false}},{"created_at":"Sun Jun 03 18:00:51 +0000 2007","text":"[WangTam] Multiverse \u83b7\u6536 400 \u4e07\u7f8e\u5143\u6295\u8d44: Second Life \u7684\u6210\u529f\u5e76\u975e\u5076\u7136\uff0c\u5efa\u7acb\u5728\u865a\u62df\u4e16\u754c\u57fa\u7840\u4e0a\u7684\u793e\u4f1a\u5316\u7f51\u7edc\uff0c\u5176\u5e94\u7528\u8d8a\u6765\u8d8a\u6df1\u5165\u666e\u904d\u7528\u6237\u65e5\u5e38\u7f51\u7edc\u751f\u6d3b\u3002\u4ee5\u540e\u7c7b\u4f3c\u4e8e .. 
http://tinyurl.com/36j5r6","id":89497522,"user":{"name":"\u542f\u7f81","description":"\u88c5\u50bb\u6bd4\u88c5\u903c\u66f4\u6709\u76ca\u4e8e\u8eab\u5fc3\u5065\u5eb7","location":"\u714b (Mars)","profile_image_url":"http://assets3.twitter.com/system/user/profile_image/917171/normal/QeeGi_Radlin.png?1176903251","url":"http://www.wangtam.com","id":917171,"screen_name":"QeeGi","protected":false}},{"created_at":"Sun Jun 03 18:00:51 +0000 2007","text":"Adoring the sublime architecture of the universe, including you.","id":89497512,"user":{"name":"Jon","description":"The millionth monkey.","location":"A castle. In SPACE!","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/6239122/normal/jonbasscropped2.jpg?1179858686","url":"http://www.jonglassett.com","id":6239122,"screen_name":"jonniejerko","protected":false}},{"created_at":"Sun Jun 03 18:00:49 +0000 2007","text":"\u9154\u3063\u305f\u3002\u5b9f\u306b\u9154\u3063\u305f\u3002\u76ee\u306e\u524d\u306e\u5b9a\u7fa9\u3092\u7c21\u5358\u306b\u65ad\u5b9a\u3059\u308b\u307b\u3069\u9154\u3063\u3066\u3057\u307e\u3063\u305f\uff01","id":89497482,"user":{"name":"wazurai","description":"\u7169\u3046\u4e8b\u3092\u4ed5\u4e8b\u306b\u98df\u3079\u3066\u3044\u3051\u308b\u69d8\u306b\u8abf\u7bc0\u4e2d\u306e\u6bce\u65e5\u3002","location":"\u65e5\u672c\u306e\u6771\u4eac\u306e\u7acb\u5ddd\u306e\u6771\u3042\u305f\u308a\u3002","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/6464452/normal/382616310_249-1.jpg?1180682927","url":"http://wazurai.org/","id":6464452,"screen_name":"wazurai","protected":false}},{"created_at":"Sun Jun 03 18:00:47 +0000 2007","text":"aaarrrggghhhh...looks like a bad day for air travel.","id":89497442,"user":{"name":"Andrew DeVigal","description":"","location":"New York, 
NY","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/626103/normal/andrewdevigal_lores.jpg?1171963676","url":"http://andrew.devigal.com/","id":626103,"screen_name":"drewvigal","protected":false}},{"created_at":"Sun Jun 03 18:00:46 +0000 2007","text":"\u3010\u97d3\u56fd\u3011\u82f1\u8a9e\u304c\u308f\u304b\u3089\u306a\u3044\u3068\u30bf\u30af\u30b7\u30fc\u904b\u8ee2\u624b\u3092\u6bb4\u3063\u305f\u30a2\u30e1\u30ea\u30ab\u4eba\u3092\u5728\u5b85\u8d77\u8a34 http://tinyurl.com/38bhj8","id":89497432,"user":{"name":"2NN","description":"","location":"","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/5507772/normal/2NN.gif?1177538505","url":"http://www.2nn.jp/","id":5507772,"screen_name":"2NN","protected":false}},{"created_at":"Sun Jun 03 18:00:46 +0000 2007","text":"Why gas prices are so pumped up: Gas experts place the blame for high prices on a national gas shortage .. http://tinyurl.com/ysc8rm","id":89497402,"user":{"name":"Netscape","description":null,"location":null,"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/822660/normal/Picture_3.png?1173380596","url":null,"id":822660,"screen_name":"Netscape","protected":false}},{"created_at":"Sun Jun 03 18:00:45 +0000 2007","text":"@erfani \u0634\u0646\u06cc\u062f\u0647 \u0628\u0648\u062f\u0645","id":89497392,"user":{"name":"Mahdi kazzazi","description":"","location":"","profile_image_url":"http://assets0.twitter.com/system/user/profile_image/5630472/normal/miimage.jpg?1180550654","url":"http://www.persiandeveloper.com/","id":5630472,"screen_name":"MMahdi","protected":false}},{"created_at":"Sun Jun 03 18:00:44 +0000 2007","text":"De a Fried k\u00f6nyve is!","id":89497372,"user":{"name":"Balint Sera","description":null,"location":null,"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/5861892/normal/pic.jpg?1178628338","url":null,"id":5861892,"screen_name":"damnadm","protected":false}}] |
1
testdata/public_timeline_error.json
| @@ -0,0 +1 @@ | ||
| +{"error": "test error"} |
1
testdata/replies.json
| @@ -0,0 +1 @@ | ||
| +[{"created_at":"Mon Apr 23 06:56:04 +0000 2007","text":"@dewitt - touche.","id":36657062,"user":{"name":"sean coon","description":"trying to make a living and a difference...","location":"Greensboro, NC","url":"http://www.seancoon.org","id":677903,"protected":false,"profile_image_url":"http://assets3.twitter.com/system/user/profile_image/677903/normal/sean-85.png?1173252480","screen_name":"spcoon"}},{"created_at":"Sat Feb 10 22:26:08 +0000 2007","text":"@ DeWitt: say hey to Fairway for me","id":5418088,"user":{"name":"steve o'grady","description":"analyst and co-founder of RedMonk","location":"Denver, CO","url":"http://redmonk.com/sogrady","id":143883,"protected":false,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/143883/normal/headshot.jpg?1174273279","screen_name":"sogrady"}},{"created_at":"Fri Feb 09 21:24:23 +0000 2007","text":"@dewitt - imho, \"meme-ish\" would be an excellent definition of the Internet in general. ","id":5398801,"user":{"name":"Veronica","description":"CNET TV host and podcasting diva of Buzz Out Loud","location":"San Francisco","url":"http://www.veronicabelmont.com","id":10350,"protected":false,"profile_image_url":"http://assets2.twitter.com/system/user/profile_image/10350/normal/mypictr_140x144.jpg?1179253942","screen_name":"Veronica"}},{"created_at":"Mon Feb 05 20:13:20 +0000 2007","text":"@ DeWitt: hahaha - figured as much. only time in NYC in two months and it has to be Valentine's day.","id":5327278,"user":{"name":"steve o'grady","description":"analyst and co-founder of RedMonk","location":"Denver, CO","url":"http://redmonk.com/sogrady","id":143883,"protected":false,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/143883/normal/headshot.jpg?1174273279","screen_name":"sogrady"}},{"created_at":"Mon Feb 05 20:03:53 +0000 2007","text":"@ DeWitt (and world): i'm in the 14th, and probably back out midday the 15th. 
if valentine's day isn't already spoken for, i'm game.","id":5327163,"user":{"name":"steve o'grady","description":"analyst and co-founder of RedMonk","location":"Denver, CO","url":"http://redmonk.com/sogrady","id":143883,"protected":false,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/143883/normal/headshot.jpg?1174273279","screen_name":"sogrady"}},{"created_at":"Mon Feb 05 19:59:46 +0000 2007","text":"@ DeWitt: what days will you be in NYC?","id":5327090,"user":{"name":"steve o'grady","description":"analyst and co-founder of RedMonk","location":"Denver, CO","url":"http://redmonk.com/sogrady","id":143883,"protected":false,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/143883/normal/headshot.jpg?1174273279","screen_name":"sogrady"}},{"created_at":"Wed Jan 31 22:29:38 +0000 2007","text":"@DeWitt: you'll like it. picked it up this afternoon in non-DRM crap form ;) better than Wincing the Night Away so far, for me","id":4895723,"user":{"name":"steve o'grady","description":"analyst and co-founder of RedMonk","location":"Denver, CO","url":"http://redmonk.com/sogrady","id":143883,"protected":false,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/143883/normal/headshot.jpg?1174273279","screen_name":"sogrady"}}] |
1
testdata/show-89512102.json
| @@ -0,0 +1 @@ | ||
| +{"created_at":"Sun Jun 03 18:15:29 +0000 2007","text":"\u041c\u043e\u0451 \u0441\u0443\u0434\u043d\u043e \u043d\u0430 \u0432\u043e\u0437\u0434\u0443\u0448\u043d\u043e\u0439 \u043f\u043e\u0434\u0443\u0448\u043a\u0435 \u043f\u043e\u043b\u043d\u043e \u0443\u0433\u0440\u0435\u0439","id":89512102,"user":{"name":"Kesuke Miyagi","description":"\u79c1\u306e\u30db\u30d0\u30fc\u30af\u30e9\u30d5\u30c8 \u306f\u9c3b\u304c\u4e00\u676f\u3067\u3059\u3002","location":"Okinawa, Japan","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/718443/normal/kesuke.png?1169966399","url":null,"id":718443,"screen_name":"kesuke","protected":false}} |
1
testdata/show-dewitt.json
| @@ -0,0 +1 @@ | ||
| +{"friends_count":40,"profile_background_color":"FFFFFF","name":"DeWitt","statuses_count":71,"followers_count":64,"profile_text_color":"121212","favourites_count":2,"profile_link_color":"666666","description":"Indeterminate things","location":"San Francisco, CA","profile_sidebar_fill_color":"CCCCCC","url":"http://unto.net/","id":673483,"profile_sidebar_border_color":"333333","protected":false,"status":{"created_at":"Sun Jun 03 19:50:23 +0000 2007","text":"If a theme song played when I walked around all day, I'd want it to be All My Friends by LCD Soundsystem.","id":89586072},"profile_image_url":"http://assets0.twitter.com/system/user/profile_image/673483/normal/me.jpg?1171965914","screen_name":"dewitt"} |
1
testdata/status-destroy.json
| @@ -0,0 +1 @@ | ||
| +{"created_at":"Wed Jun 13 17:08:02 +0000 2007","text":"Just a final test before 0.4 release!","id":103208352,"user":{"name":"Kesuke Miyagi","description":"\u79c1\u306e\u30db\u30d0\u30fc\u30af\u30e9\u30d5\u30c8 \u306f\u9c3b\u304c\u4e00\u676f\u3067\u3059\u3002","location":"Okinawa, Japan","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/718443/normal/kesuke.png?1169966399","url":null,"id":718443,"screen_name":"kesuke","protected":false}} |
1
testdata/update.json
| @@ -0,0 +1 @@ | ||
| +{"created_at":"Sun Jun 03 18:15:29 +0000 2007","text":"\u041c\u043e\u0451 \u0441\u0443\u0434\u043d\u043e \u043d\u0430 \u0432\u043e\u0437\u0434\u0443\u0448\u043d\u043e\u0439 \u043f\u043e\u0434\u0443\u0448\u043a\u0435 \u043f\u043e\u043b\u043d\u043e \u0443\u0433\u0440\u0435\u0439","id":89512102,"user":{"name":"Kesuke Miyagi","description":"\u79c1\u306e\u30db\u30d0\u30fc\u30af\u30e9\u30d5\u30c8 \u306f\u9c3b\u304c\u4e00\u676f\u3067\u3059\u3002","location":"Okinawa, Japan","profile_image_url":"http://assets1.twitter.com/system/user/profile_image/718443/normal/kesuke.png?1169966399","url":null,"id":718443,"screen_name":"kesuke","protected":false}} |
1
testdata/user_timeline-kesuke.json
| @@ -0,0 +1 @@ | ||
| +[{"created_at":"Sun Jun 03 18:15:29 +0000 2007","text":"\u041c\u043e\u0451 \u0441\u0443\u0434\u043d\u043e \u043d\u0430 \u0432\u043e\u0437\u0434\u0443\u0448\u043d\u043e\u0439 \u043f\u043e\u0434\u0443\u0448\u043a\u0435 \u043f\u043e\u043b\u043d\u043e \u0443\u0433\u0440\u0435\u0439","id":89512102,"user":{"name":"Kesuke Miyagi","description":"\u79c1\u306e\u30db\u30d0\u30fc\u30af\u30e9\u30d5\u30c8 \u306f\u9c3b\u304c\u4e00\u676f\u3067\u3059\u3002","location":"Okinawa, Japan","url":null,"id":718443,"protected":false,"profile_image_url":"http://assets1.twitter.com/system/user/profile_image/718443/normal/kesuke.png?1169966399","screen_name":"kesuke"}}] |
1
testdata/user_timeline.json
| @@ -0,0 +1 @@ | ||
| +[{"user": {"name": "DeWitt", "url": "http://unto.net/", "id": 673483, "description": "Indeterminate things", "screen_name": "dewitt", "location": "San Francisco, CA"}, "text": "\"Select all\" and archive your Gmail inbox. The page loads so much faster!", "id": 4212713, "relative_created_at": "2 days ago", "created_at": "Fri Jan 26 17:28:19 +0000 2007"}] |
3,969
twitter.py
3,969 additions,
0 deletions
not shown because the diff is too large. Please use a local Git client to view these changes.
614
twitter_test.py
| @@ -0,0 +1,614 @@ | ||
| +#!/usr/bin/python2.4 | ||
| +# -*- coding: utf-8 -*-# | ||
| +# | ||
| +# Copyright 2007 The Python-Twitter Developers | ||
| +# | ||
| +# Licensed under the Apache License, Version 2.0 (the "License"); | ||
| +# you may not use this file except in compliance with the License. | ||
| +# You may obtain a copy of the License at | ||
| +# | ||
| +# http://www.apache.org/licenses/LICENSE-2.0 | ||
| +# | ||
| +# Unless required by applicable law or agreed to in writing, software | ||
| +# distributed under the License is distributed on an "AS IS" BASIS, | ||
| +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| +# See the License for the specific language governing permissions and | ||
| +# limitations under the License. | ||
| + | ||
| +'''Unit tests for the twitter.py library''' | ||
| + | ||
| +__author__ = '[email protected]' | ||
| + | ||
| +import os | ||
| +import simplejson | ||
| +import time | ||
| +import calendar | ||
| +import unittest | ||
| +import urllib | ||
| + | ||
| +import twitter | ||
| + | ||
| +class StatusTest(unittest.TestCase): | ||
| + | ||
| + SAMPLE_JSON = '''{"created_at": "Fri Jan 26 23:17:14 +0000 2007", "id": 4391023, "text": "A l\u00e9gp\u00e1rn\u00e1s haj\u00f3m tele van angoln\u00e1kkal.", "user": {"description": "Canvas. JC Penny. Three ninety-eight.", "id": 718443, "location": "Okinawa, Japan", "name": "Kesuke Miyagi", "profile_image_url": "https://twitter.com/system/user/profile_image/718443/normal/kesuke.png", "screen_name": "kesuke", "url": "https://twitter.com/kesuke"}}''' | ||
| + | ||
| + def _GetSampleUser(self): | ||
| + return twitter.User(id=718443, | ||
| + name='Kesuke Miyagi', | ||
| + screen_name='kesuke', | ||
| + description=u'Canvas. JC Penny. Three ninety-eight.', | ||
| + location='Okinawa, Japan', | ||
| + url='https://twitter.com/kesuke', | ||
| + profile_image_url='https://twitter.com/system/user/pro' | ||
| + 'file_image/718443/normal/kesuke.pn' | ||
| + 'g') | ||
| + | ||
| + def _GetSampleStatus(self): | ||
| + return twitter.Status(created_at='Fri Jan 26 23:17:14 +0000 2007', | ||
| + id=4391023, | ||
| + text=u'A légpárnás hajóm tele van angolnákkal.', | ||
| + user=self._GetSampleUser()) | ||
| + | ||
| + def testInit(self): | ||
| + '''Test the twitter.Status constructor''' | ||
| + status = twitter.Status(created_at='Fri Jan 26 23:17:14 +0000 2007', | ||
| + id=4391023, | ||
| + text=u'A légpárnás hajóm tele van angolnákkal.', | ||
| + user=self._GetSampleUser()) | ||
| + | ||
| + def testGettersAndSetters(self): | ||
| + '''Test all of the twitter.Status getters and setters''' | ||
| + status = twitter.Status() | ||
| + status.SetId(4391023) | ||
| + self.assertEqual(4391023, status.GetId()) | ||
| + created_at = calendar.timegm((2007, 1, 26, 23, 17, 14, -1, -1, -1)) | ||
| + status.SetCreatedAt('Fri Jan 26 23:17:14 +0000 2007') | ||
| + self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', status.GetCreatedAt()) | ||
| + self.assertEqual(created_at, status.GetCreatedAtInSeconds()) | ||
| + status.SetNow(created_at + 10) | ||
| + self.assertEqual("about 10 seconds ago", status.GetRelativeCreatedAt()) | ||
| + status.SetText(u'A légpárnás hajóm tele van angolnákkal.') | ||
| + self.assertEqual(u'A légpárnás hajóm tele van angolnákkal.', | ||
| + status.GetText()) | ||
| + status.SetUser(self._GetSampleUser()) | ||
| + self.assertEqual(718443, status.GetUser().id) | ||
| + | ||
| + def testProperties(self): | ||
| + '''Test all of the twitter.Status properties''' | ||
| + status = twitter.Status() | ||
| + status.id = 1 | ||
| + self.assertEqual(1, status.id) | ||
| + created_at = calendar.timegm((2007, 1, 26, 23, 17, 14, -1, -1, -1)) | ||
| + status.created_at = 'Fri Jan 26 23:17:14 +0000 2007' | ||
| + self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', status.created_at) | ||
| + self.assertEqual(created_at, status.created_at_in_seconds) | ||
| + status.now = created_at + 10 | ||
| + self.assertEqual('about 10 seconds ago', status.relative_created_at) | ||
| + status.user = self._GetSampleUser() | ||
| + self.assertEqual(718443, status.user.id) | ||
| + | ||
| + def _ParseDate(self, string): | ||
| + return calendar.timegm(time.strptime(string, '%b %d %H:%M:%S %Y')) | ||
| + | ||
| + def testRelativeCreatedAt(self): | ||
| + '''Test various permutations of Status relative_created_at''' | ||
| + status = twitter.Status(created_at='Fri Jan 01 12:00:00 +0000 2007') | ||
| + status.now = self._ParseDate('Jan 01 12:00:00 2007') | ||
| + self.assertEqual('about a second ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:00:01 2007') | ||
| + self.assertEqual('about a second ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:00:02 2007') | ||
| + self.assertEqual('about 2 seconds ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:00:05 2007') | ||
| + self.assertEqual('about 5 seconds ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:00:50 2007') | ||
| + self.assertEqual('about a minute ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:01:00 2007') | ||
| + self.assertEqual('about a minute ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:01:10 2007') | ||
| + self.assertEqual('about a minute ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:02:00 2007') | ||
| + self.assertEqual('about 2 minutes ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:31:50 2007') | ||
| + self.assertEqual('about 31 minutes ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 12:50:00 2007') | ||
| + self.assertEqual('about an hour ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 13:00:00 2007') | ||
| + self.assertEqual('about an hour ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 13:10:00 2007') | ||
| + self.assertEqual('about an hour ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 14:00:00 2007') | ||
| + self.assertEqual('about 2 hours ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 01 19:00:00 2007') | ||
| + self.assertEqual('about 7 hours ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 02 11:30:00 2007') | ||
| + self.assertEqual('about a day ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Jan 04 12:00:00 2007') | ||
| + self.assertEqual('about 3 days ago', status.relative_created_at) | ||
| + status.now = self._ParseDate('Feb 04 12:00:00 2007') | ||
| + self.assertEqual('about 34 days ago', status.relative_created_at) | ||
| + | ||
| + def testAsJsonString(self): | ||
| + '''Test the twitter.Status AsJsonString method''' | ||
| + self.assertEqual(StatusTest.SAMPLE_JSON, | ||
| + self._GetSampleStatus().AsJsonString()) | ||
| + | ||
| + def testAsDict(self): | ||
| + '''Test the twitter.Status AsDict method''' | ||
| + status = self._GetSampleStatus() | ||
| + data = status.AsDict() | ||
| + self.assertEqual(4391023, data['id']) | ||
| + self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', data['created_at']) | ||
| + self.assertEqual(u'A légpárnás hajóm tele van angolnákkal.', data['text']) | ||
| + self.assertEqual(718443, data['user']['id']) | ||
| + | ||
| + def testEq(self): | ||
| + '''Test the twitter.Status __eq__ method''' | ||
| + status = twitter.Status() | ||
| + status.created_at = 'Fri Jan 26 23:17:14 +0000 2007' | ||
| + status.id = 4391023 | ||
| + status.text = u'A légpárnás hajóm tele van angolnákkal.' | ||
| + status.user = self._GetSampleUser() | ||
| + self.assertEqual(status, self._GetSampleStatus()) | ||
| + | ||
| + def testNewFromJsonDict(self): | ||
| + '''Test the twitter.Status NewFromJsonDict method''' | ||
| + data = simplejson.loads(StatusTest.SAMPLE_JSON) | ||
| + status = twitter.Status.NewFromJsonDict(data) | ||
| + self.assertEqual(self._GetSampleStatus(), status) | ||
| + | ||
| +class UserTest(unittest.TestCase): | ||
| + | ||
| + SAMPLE_JSON = '''{"description": "Indeterminate things", "id": 673483, "location": "San Francisco, CA", "name": "DeWitt", "profile_image_url": "https://twitter.com/system/user/profile_image/673483/normal/me.jpg", "screen_name": "dewitt", "status": {"created_at": "Fri Jan 26 17:28:19 +0000 2007", "id": 4212713, "text": "\\"Select all\\" and archive your Gmail inbox. The page loads so much faster!"}, "url": "http://unto.net/"}''' | ||
| + | ||
| + def _GetSampleStatus(self): | ||
| + return twitter.Status(created_at='Fri Jan 26 17:28:19 +0000 2007', | ||
| + id=4212713, | ||
| + text='"Select all" and archive your Gmail inbox. ' | ||
| + ' The page loads so much faster!') | ||
| + | ||
| + def _GetSampleUser(self): | ||
| + return twitter.User(id=673483, | ||
| + name='DeWitt', | ||
| + screen_name='dewitt', | ||
| + description=u'Indeterminate things', | ||
| + location='San Francisco, CA', | ||
| + url='http://unto.net/', | ||
| + profile_image_url='https://twitter.com/system/user/prof' | ||
| + 'ile_image/673483/normal/me.jpg', | ||
| + status=self._GetSampleStatus()) | ||
| + | ||
| + | ||
| + | ||
| + def testInit(self): | ||
| + '''Test the twitter.User constructor''' | ||
| + user = twitter.User(id=673483, | ||
| + name='DeWitt', | ||
| + screen_name='dewitt', | ||
| + description=u'Indeterminate things', | ||
| + url='https://twitter.com/dewitt', | ||
| + profile_image_url='https://twitter.com/system/user/prof' | ||
| + 'ile_image/673483/normal/me.jpg', | ||
| + status=self._GetSampleStatus()) | ||
| + | ||
| + def testGettersAndSetters(self): | ||
| + '''Test all of the twitter.User getters and setters''' | ||
| + user = twitter.User() | ||
| + user.SetId(673483) | ||
| + self.assertEqual(673483, user.GetId()) | ||
| + user.SetName('DeWitt') | ||
| + self.assertEqual('DeWitt', user.GetName()) | ||
| + user.SetScreenName('dewitt') | ||
| + self.assertEqual('dewitt', user.GetScreenName()) | ||
| + user.SetDescription('Indeterminate things') | ||
| + self.assertEqual('Indeterminate things', user.GetDescription()) | ||
| + user.SetLocation('San Francisco, CA') | ||
| + self.assertEqual('San Francisco, CA', user.GetLocation()) | ||
| + user.SetProfileImageUrl('https://twitter.com/system/user/profile_im' | ||
| + 'age/673483/normal/me.jpg') | ||
| + self.assertEqual('https://twitter.com/system/user/profile_image/673' | ||
| + '483/normal/me.jpg', user.GetProfileImageUrl()) | ||
| + user.SetStatus(self._GetSampleStatus()) | ||
| + self.assertEqual(4212713, user.GetStatus().id) | ||
| + | ||
| + def testProperties(self): | ||
| + '''Test all of the twitter.User properties''' | ||
| + user = twitter.User() | ||
| + user.id = 673483 | ||
| + self.assertEqual(673483, user.id) | ||
| + user.name = 'DeWitt' | ||
| + self.assertEqual('DeWitt', user.name) | ||
| + user.screen_name = 'dewitt' | ||
| + self.assertEqual('dewitt', user.screen_name) | ||
| + user.description = 'Indeterminate things' | ||
| + self.assertEqual('Indeterminate things', user.description) | ||
| + user.location = 'San Francisco, CA' | ||
| + self.assertEqual('San Francisco, CA', user.location) | ||
| + user.profile_image_url = 'https://twitter.com/system/user/profile_i' \ | ||
| + 'mage/673483/normal/me.jpg' | ||
| + self.assertEqual('https://twitter.com/system/user/profile_image/6734' | ||
| + '83/normal/me.jpg', user.profile_image_url) | ||
| + self.status = self._GetSampleStatus() | ||
| + self.assertEqual(4212713, self.status.id) | ||
| + | ||
| + def testAsJsonString(self): | ||
| + '''Test the twitter.User AsJsonString method''' | ||
| + self.assertEqual(UserTest.SAMPLE_JSON, | ||
| + self._GetSampleUser().AsJsonString()) | ||
| + | ||
| + def testAsDict(self): | ||
| + '''Test the twitter.User AsDict method''' | ||
| + user = self._GetSampleUser() | ||
| + data = user.AsDict() | ||
| + self.assertEqual(673483, data['id']) | ||
| + self.assertEqual('DeWitt', data['name']) | ||
| + self.assertEqual('dewitt', data['screen_name']) | ||
| + self.assertEqual('Indeterminate things', data['description']) | ||
| + self.assertEqual('San Francisco, CA', data['location']) | ||
| + self.assertEqual('https://twitter.com/system/user/profile_image/6734' | ||
| + '83/normal/me.jpg', data['profile_image_url']) | ||
| + self.assertEqual('http://unto.net/', data['url']) | ||
| + self.assertEqual(4212713, data['status']['id']) | ||
| + | ||
| + def testEq(self): | ||
| + '''Test the twitter.User __eq__ method''' | ||
| + user = twitter.User() | ||
| + user.id = 673483 | ||
| + user.name = 'DeWitt' | ||
| + user.screen_name = 'dewitt' | ||
| + user.description = 'Indeterminate things' | ||
| + user.location = 'San Francisco, CA' | ||
| + user.profile_image_url = 'https://twitter.com/system/user/profile_image/67' \ | ||
| + '3483/normal/me.jpg' | ||
| + user.url = 'http://unto.net/' | ||
| + user.status = self._GetSampleStatus() | ||
| + self.assertEqual(user, self._GetSampleUser()) | ||
| + | ||
| + def testNewFromJsonDict(self): | ||
| + '''Test the twitter.User NewFromJsonDict method''' | ||
| + data = simplejson.loads(UserTest.SAMPLE_JSON) | ||
| + user = twitter.User.NewFromJsonDict(data) | ||
| + self.assertEqual(self._GetSampleUser(), user) | ||
| + | ||
| +class TrendTest(unittest.TestCase): | ||
| + | ||
| + SAMPLE_JSON = '''{"name": "Kesuke Miyagi", "query": "Kesuke Miyagi"}''' | ||
| + | ||
| + def _GetSampleTrend(self): | ||
| + return twitter.Trend(name='Kesuke Miyagi', | ||
| + query='Kesuke Miyagi', | ||
| + timestamp='Fri Jan 26 23:17:14 +0000 2007') | ||
| + | ||
| + def testInit(self): | ||
| + '''Test the twitter.Trend constructor''' | ||
| + trend = twitter.Trend(name='Kesuke Miyagi', | ||
| + query='Kesuke Miyagi', | ||
| + timestamp='Fri Jan 26 23:17:14 +0000 2007') | ||
| + | ||
| + def testProperties(self): | ||
| + '''Test all of the twitter.Trend properties''' | ||
| + trend = twitter.Trend() | ||
| + trend.name = 'Kesuke Miyagi' | ||
| + self.assertEqual('Kesuke Miyagi', trend.name) | ||
| + trend.query = 'Kesuke Miyagi' | ||
| + self.assertEqual('Kesuke Miyagi', trend.query) | ||
| + trend.timestamp = 'Fri Jan 26 23:17:14 +0000 2007' | ||
| + self.assertEqual('Fri Jan 26 23:17:14 +0000 2007', trend.timestamp) | ||
| + | ||
| + def testNewFromJsonDict(self): | ||
| + '''Test the twitter.Trend NewFromJsonDict method''' | ||
| + data = simplejson.loads(TrendTest.SAMPLE_JSON) | ||
| + trend = twitter.Trend.NewFromJsonDict(data, timestamp='Fri Jan 26 23:17:14 +0000 2007') | ||
| + self.assertEqual(self._GetSampleTrend(), trend) | ||
| + | ||
| + def testEq(self): | ||
| + '''Test the twitter.Trend __eq__ method''' | ||
| + trend = twitter.Trend() | ||
| + trend.name = 'Kesuke Miyagi' | ||
| + trend.query = 'Kesuke Miyagi' | ||
| + trend.timestamp = 'Fri Jan 26 23:17:14 +0000 2007' | ||
| + self.assertEqual(trend, self._GetSampleTrend()) | ||
| + | ||
| +class FileCacheTest(unittest.TestCase): | ||
| + | ||
| + def testInit(self): | ||
| + """Test the twitter._FileCache constructor""" | ||
| + cache = twitter._FileCache() | ||
| + self.assert_(cache is not None, 'cache is None') | ||
| + | ||
| + def testSet(self): | ||
| + """Test the twitter._FileCache.Set method""" | ||
| + cache = twitter._FileCache() | ||
| + cache.Set("foo",'Hello World!') | ||
| + cache.Remove("foo") | ||
| + | ||
| + def testRemove(self): | ||
| + """Test the twitter._FileCache.Remove method""" | ||
| + cache = twitter._FileCache() | ||
| + cache.Set("foo",'Hello World!') | ||
| + cache.Remove("foo") | ||
| + data = cache.Get("foo") | ||
| + self.assertEqual(data, None, 'data is not None') | ||
| + | ||
| + def testGet(self): | ||
| + """Test the twitter._FileCache.Get method""" | ||
| + cache = twitter._FileCache() | ||
| + cache.Set("foo",'Hello World!') | ||
| + data = cache.Get("foo") | ||
| + self.assertEqual('Hello World!', data) | ||
| + cache.Remove("foo") | ||
| + | ||
| + def testGetCachedTime(self): | ||
| + """Test the twitter._FileCache.GetCachedTime method""" | ||
| + now = time.time() | ||
| + cache = twitter._FileCache() | ||
| + cache.Set("foo",'Hello World!') | ||
| + cached_time = cache.GetCachedTime("foo") | ||
| + delta = cached_time - now | ||
| + self.assert_(delta <= 1, | ||
| + 'Cached time differs from clock time by more than 1 second.') | ||
| + cache.Remove("foo") | ||
| + | ||
| +class ApiTest(unittest.TestCase): | ||
| + | ||
| + def setUp(self): | ||
| + self._urllib = MockUrllib() | ||
| + api = twitter.Api(consumer_key='CONSUMER_KEY', | ||
| + consumer_secret='CONSUMER_SECRET', | ||
| + access_token_key='OAUTH_TOKEN', | ||
| + access_token_secret='OAUTH_SECRET', | ||
| + cache=None) | ||
| + api.SetUrllib(self._urllib) | ||
| + self._api = api | ||
| + | ||
| + def testTwitterError(self): | ||
| + '''Test that twitter responses containing an error message are wrapped.''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/public_timeline.json', | ||
| + curry(self._OpenTestData, 'public_timeline_error.json')) | ||
| + # Manually try/catch so we can check the exception's value | ||
| + try: | ||
| + statuses = self._api.GetPublicTimeline() | ||
| + except twitter.TwitterError, error: | ||
| + # If the error message matches, the test passes | ||
| + self.assertEqual('test error', error.message) | ||
| + else: | ||
| + self.fail('TwitterError expected') | ||
| + | ||
| + def testGetPublicTimeline(self): | ||
| + '''Test the twitter.Api GetPublicTimeline method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/public_timeline.json?since_id=12345', | ||
| + curry(self._OpenTestData, 'public_timeline.json')) | ||
| + statuses = self._api.GetPublicTimeline(since_id=12345) | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + self.assertEqual(20, len(statuses)) | ||
| + self.assertEqual(89497702, statuses[0].id) | ||
| + | ||
| + def testGetUserTimeline(self): | ||
| + '''Test the twitter.Api GetUserTimeline method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/user_timeline/kesuke.json?count=1', | ||
| + curry(self._OpenTestData, 'user_timeline-kesuke.json')) | ||
| + statuses = self._api.GetUserTimeline('kesuke', count=1) | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + self.assertEqual(89512102, statuses[0].id) | ||
| + self.assertEqual(718443, statuses[0].user.id) | ||
| + | ||
| + def testGetFriendsTimeline(self): | ||
| + '''Test the twitter.Api GetFriendsTimeline method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/friends_timeline/kesuke.json', | ||
| + curry(self._OpenTestData, 'friends_timeline-kesuke.json')) | ||
| + statuses = self._api.GetFriendsTimeline('kesuke') | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + self.assertEqual(20, len(statuses)) | ||
| + self.assertEqual(718443, statuses[0].user.id) | ||
| + | ||
| + def testGetStatus(self): | ||
| + '''Test the twitter.Api GetStatus method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/show/89512102.json', | ||
| + curry(self._OpenTestData, 'show-89512102.json')) | ||
| + status = self._api.GetStatus(89512102) | ||
| + self.assertEqual(89512102, status.id) | ||
| + self.assertEqual(718443, status.user.id) | ||
| + | ||
| + def testDestroyStatus(self): | ||
| + '''Test the twitter.Api DestroyStatus method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/destroy/103208352.json', | ||
| + curry(self._OpenTestData, 'status-destroy.json')) | ||
| + status = self._api.DestroyStatus(103208352) | ||
| + self.assertEqual(103208352, status.id) | ||
| + | ||
| + def testPostUpdate(self): | ||
| + '''Test the twitter.Api PostUpdate method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/update.json', | ||
| + curry(self._OpenTestData, 'update.json')) | ||
| + status = self._api.PostUpdate(u'Моё судно на воздушной подушке полно угрей'.encode('utf8')) | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + self.assertEqual(u'Моё судно на воздушной подушке полно угрей', status.text) | ||
| + | ||
| + def testGetReplies(self): | ||
| + '''Test the twitter.Api GetReplies method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/replies.json?page=1', | ||
| + curry(self._OpenTestData, 'replies.json')) | ||
| + statuses = self._api.GetReplies(page=1) | ||
| + self.assertEqual(36657062, statuses[0].id) | ||
| + | ||
| + def testGetFriends(self): | ||
| + '''Test the twitter.Api GetFriends method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/friends.json?cursor=123', | ||
| + curry(self._OpenTestData, 'friends.json')) | ||
| + users = self._api.GetFriends(cursor=123) | ||
| + buzz = [u.status for u in users if u.screen_name == 'buzz'] | ||
| + self.assertEqual(89543882, buzz[0].id) | ||
| + | ||
| + def testGetFollowers(self): | ||
| + '''Test the twitter.Api GetFollowers method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/followers.json?page=1', | ||
| + curry(self._OpenTestData, 'followers.json')) | ||
| + users = self._api.GetFollowers(page=1) | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + alexkingorg = [u.status for u in users if u.screen_name == 'alexkingorg'] | ||
| + self.assertEqual(89554432, alexkingorg[0].id) | ||
| + | ||
| + def testGetFeatured(self): | ||
| + '''Test the twitter.Api GetFeatured method''' | ||
| + self._AddHandler('https://api.twitter.com/1/statuses/featured.json', | ||
| + curry(self._OpenTestData, 'featured.json')) | ||
| + users = self._api.GetFeatured() | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + stevenwright = [u.status for u in users if u.screen_name == 'stevenwright'] | ||
| + self.assertEqual(86991742, stevenwright[0].id) | ||
| + | ||
| + def testGetDirectMessages(self): | ||
| + '''Test the twitter.Api GetDirectMessages method''' | ||
| + self._AddHandler('https://api.twitter.com/1/direct_messages.json?page=1', | ||
| + curry(self._OpenTestData, 'direct_messages.json')) | ||
| + statuses = self._api.GetDirectMessages(page=1) | ||
| + self.assertEqual(u'A légpárnás hajóm tele van angolnákkal.', statuses[0].text) | ||
| + | ||
| + def testPostDirectMessage(self): | ||
| + '''Test the twitter.Api PostDirectMessage method''' | ||
| + self._AddHandler('https://api.twitter.com/1/direct_messages/new.json', | ||
| + curry(self._OpenTestData, 'direct_messages-new.json')) | ||
| + status = self._api.PostDirectMessage('test', u'Моё судно на воздушной подушке полно угрей'.encode('utf8')) | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + self.assertEqual(u'Моё судно на воздушной подушке полно угрей', status.text) | ||
| + | ||
| + def testDestroyDirectMessage(self): | ||
| + '''Test the twitter.Api DestroyDirectMessage method''' | ||
| + self._AddHandler('https://api.twitter.com/1/direct_messages/destroy/3496342.json', | ||
| + curry(self._OpenTestData, 'direct_message-destroy.json')) | ||
| + status = self._api.DestroyDirectMessage(3496342) | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + self.assertEqual(673483, status.sender_id) | ||
| + | ||
| + def testCreateFriendship(self): | ||
| + '''Test the twitter.Api CreateFriendship method''' | ||
| + self._AddHandler('https://api.twitter.com/1/friendships/create/dewitt.json', | ||
| + curry(self._OpenTestData, 'friendship-create.json')) | ||
| + user = self._api.CreateFriendship('dewitt') | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + self.assertEqual(673483, user.id) | ||
| + | ||
| + def testDestroyFriendship(self): | ||
| + '''Test the twitter.Api DestroyFriendship method''' | ||
| + self._AddHandler('https://api.twitter.com/1/friendships/destroy/dewitt.json', | ||
| + curry(self._OpenTestData, 'friendship-destroy.json')) | ||
| + user = self._api.DestroyFriendship('dewitt') | ||
| + # This is rather arbitrary, but spot checking is better than nothing | ||
| + self.assertEqual(673483, user.id) | ||
| + | ||
| + def testGetUser(self): | ||
| + '''Test the twitter.Api GetUser method''' | ||
| + self._AddHandler('https://api.twitter.com/1/users/show/dewitt.json', | ||
| + curry(self._OpenTestData, 'show-dewitt.json')) | ||
| + user = self._api.GetUser('dewitt') | ||
| + self.assertEqual('dewitt', user.screen_name) | ||
| + self.assertEqual(89586072, user.status.id) | ||
| + | ||
  def _AddHandler(self, url, callback):
    '''Register callback as the canned response for the exact URL url.

    Delegates to the mock urllib injected by the test fixture, so the
    next request the Api makes for this URL gets the canned data.
    '''
    self._urllib.AddHandler(url, callback)
| + | ||
| + def _GetTestDataPath(self, filename): | ||
| + directory = os.path.dirname(os.path.abspath(__file__)) | ||
| + test_data_dir = os.path.join(directory, 'testdata') | ||
| + return os.path.join(test_data_dir, filename) | ||
| + | ||
| + def _OpenTestData(self, filename): | ||
| + f = open(self._GetTestDataPath(filename)) | ||
| + # make sure that the returned object contains an .info() method: | ||
| + # headers are set to {} | ||
| + return urllib.addinfo(f, {}) | ||
| + | ||
class MockUrllib(object):
  '''A mock replacement for urllib that hardcodes specific responses.'''

  def __init__(self):
    # URL -> zero-argument callable producing the canned response.
    self._handlers = {}
    # Expose the same attribute real urllib2 provides so callers that
    # instantiate an auth handler keep working against the mock.
    self.HTTPBasicAuthHandler = MockHTTPBasicAuthHandler

  def AddHandler(self, url, callback):
    '''Serve callback() whenever url is opened.'''
    self._handlers[url] = callback

  def build_opener(self, *handlers):
    '''Return an opener sharing this mock's registered URL map.'''
    return MockOpener(self._handlers)

  def HTTPHandler(self, *args, **kwargs):
    # Handlers are irrelevant to the mock; return a harmless placeholder.
    return None

  def HTTPSHandler(self, *args, **kwargs):
    return None

  def OpenerDirector(self):
    return self.build_opener()
| + | ||
class MockOpener(object):
  '''A mock opener for urllib.

  Each opener may serve exactly one request between open() and close();
  reuse without an intervening close() is an error, as is closing an
  opener that was never opened.
  '''

  def __init__(self, handlers):
    # handlers: URL -> zero-argument callable producing the response.
    self._handlers = handlers
    self._opened = False

  def open(self, url, data=None):
    '''Return the canned response for url, ignoring oauth parameters.'''
    if self._opened:
      raise Exception('MockOpener already opened.')

    # Remove parameters from URL - they're only added by oauth and we
    # don't want to test oauth
    if '?' in url:
      # We split using & and filter on the beginning of each key
      # This is crude but we have to keep the ordering for now.
      # Bug fix: split on the FIRST '?' only; a bare split('?') raises
      # ValueError on the 2-tuple unpack when the URL contains a second
      # '?' inside the query string.
      (url, qs) = url.split('?', 1)

      tokens = [token for token in qs.split('&')
                if not token.startswith('oauth')]

      if len(tokens) > 0:
        url = "%s?%s" % (url, '&'.join(tokens))

    if url in self._handlers:
      self._opened = True
      return self._handlers[url]()
    else:
      raise Exception('Unexpected URL %s (Checked: %s)' % (url, self._handlers))

  def add_handler(self, *args, **kwargs):
    # Accepted for interface compatibility; the mock ignores handlers.
    pass

  def close(self):
    '''Mark the opener reusable; raises if it was never opened.'''
    if not self._opened:
      raise Exception('MockOpener closed before it was opened.')
    self._opened = False
| + | ||
class MockHTTPBasicAuthHandler(object):
  '''A mock replacement for HTTPBasicAuthHandler.'''

  def add_password(self, realm, uri, user, passwd):
    '''Accept credentials without storing or checking them.'''
    # TODO(dewitt): Add verification that the proper args are passed
    pass
| + | ||
class curry:
  '''Partial-application helper: pre-bind positional and keyword args.

  Based on http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52549
  '''

  def __init__(self, fun, *args, **kwargs):
    self.fun = fun
    self.pending = args[:]
    self.kwargs = kwargs.copy()

  def __call__(self, *args, **kwargs):
    if kwargs and self.kwargs:
      # Both sides supplied keywords: merge, call-time keywords winning.
      kw = dict(self.kwargs)
      kw.update(kwargs)
    else:
      # At most one side has keywords; use whichever is non-empty.
      kw = kwargs or self.kwargs
    return self.fun(*(self.pending + args), **kw)
| + | ||
| + | ||
def suite():
  '''Assemble and return the full test suite for this module.'''
  all_tests = unittest.TestSuite()
  for case in (FileCacheTest, StatusTest, UserTest, ApiTest):
    all_tests.addTests(unittest.makeSuite(case))
  return all_tests
| + | ||
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  unittest.main()
0 comments on commit
dbe768a