diff --git a/.HA_VERSION b/.HA_VERSION new file mode 100755 index 00000000..6a932058 --- /dev/null +++ b/.HA_VERSION @@ -0,0 +1 @@ +0.30.1 \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100755 index 00000000..8722ada0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +*.pid +*.xml +OZW_Log.txt +home-assistant.log +home-assistant_v2.db +*.db-journal +lib +secrets.yaml +known_devices.yaml +phue.conf +pyozw.sqlite + diff --git a/README.md b/README.md old mode 100644 new mode 100755 index 556888b1..b7d82491 --- a/README.md +++ b/README.md @@ -1,2 +1,21 @@ # Home-AssistantConfig Home Assistant configuration files (YAMLs) + +This is my Home Assistant Configuration. I update it pretty regularly. +Home Assistant runs on my Raspberry Pi 3 with Z Wave Stick. I've also added a 433MHz Transmitter and receiver. + +Software on the Pi : Home Assistant, Dasher, HomeBridge + +Devices I have : +* Lots of iOS Devices +* Nest Thermostat +* Amazon Echo +* Philips Hue Hub +* Circle by Disney +* Rachio Sprinkler system +* SkyBell HD +* Rokus for all streaming +* ChromeCast Audios +* Etekcity Outlets +* Amazon Dash Buttons + diff --git a/configuration.yaml b/configuration.yaml new file mode 100755 index 00000000..cadc5fd6 --- /dev/null +++ b/configuration.yaml @@ -0,0 +1,528 @@ +homeassistant: + name: Bear Stone Run + latitude: !secret homeassistant_latitude + longitude: !secret homeassistant_longitude + elevation: !secret homeassistant_elevation + unit_system: imperial + time_zone: America/New_York + + customize: + climate.downstairs: + friendly_name: 'Nest Downstairs' + icon: mdi:air-conditioner + climate.upstairs: + friendly_name: 'Nest Upstairs' + icon: mdi:air-conditioner + sensor.dark_sky_precip_intensity: + friendly_name: 'Rainfall' + sensor.dark_sky_humidity: + friendly_name: 'Outdoor Humidity' + sensor.dark_sky_temperature: + friendly_name: 'Outdoor Temp' + sensor.speedtest_download: + friendly_name: 'Download' + icon: mdi:speedometer + 
sensor.speedtest_upload: + friendly_name: 'Upload' + icon: mdi:speedometer + media_player.roku_1gj361038190: + friendly_name: 'Downstairs Main Roku' + media_player.roku_1gs3ac111661: + friendly_name: 'Upstairs Roku' + media_player.roku_2N006T621680: + friendly_name: 'Bedroom Roku' + sensor.wii: + icon: mdi:gamepad-variant + sensor.tablotv: + icon: mdi:television-guide + sensor.hue_hub: + icon: mdi:router-wireless + sensor.rachio: + icon: mdi:spray + sensor.circle: + icon: mdi:google-circles-group + sensor.alexa_echo: + icon: mdi:amazon + sensor.skybell: + icon: mdi:camera-front + sensor.samsungtv: + icon: mdi:television + sensor.since_last_boot_templated: + friendly_name: 'HomeAssistant Uptime' + icon: mdi:clock-start + sensor.since_last_boot: + hidden: true + sensor.badlogin: + hidden: true + device_tracker.tablotv: + hidden: true + device_tracker.hue_hub: + hidden: true + device_tracker.wii: + hidden: true + device_tracker.rachio: + hidden: true + device_tracker.circle: + hidden: true + device_tracker.alexa_echo: + hidden: true + device_tracker.skybell: + hidden: true + device_tracker.samsungtv: + hidden: true + +http: + api_password: !secret http_api_password + +discovery: + +light: + platform: hue + host: 192.168.10.101 + allow_unreachable: true + +ifttt: + key: !secret ifttt_key + +device_tracker: + platform: nmap_tracker + hosts: 192.168.10.100-254 + track_new_devices: no + # consider_home: 1800 - Added to known_devices.yaml instead on a device by device basis. 
+ +frontend: +# history: +logbook: + exclude: + entities: + - sensor.since_last_boot + - sensor.since_last_boot_templated +#logger: +# default: info + +sun: + +mqtt: + broker: 127.0.0.1 + port: 1883 + client_id: home-assistant-1 + username: pi + password: raspberry + +zwave: + usb_path: /dev/ttyACM0 + config_path: /srv/hass/hass_venv/lib/python3.4/site-packages/libopenzwave-0.3.1-py3.4-linux-armv7l.egg/config + +sensor forecast: + platform: darksky + api_key: !secret forecast_key + + monitored_conditions: + - summary +# - precip_type + - precip_intensity + - temperature +# - dew_point +# - wind_speed +# - wind_bearing +# - cloud_cover + - humidity +# - pressure +# - visibility +# - ozone + +nest: + username: !secret nest_username + password: !secret nest_password + +climate: + platform: nest + +media_player: + platform: cast + +recorder: + purge_days: 14 + +sensor Speedtest: + platform: speedtest + minute: 30 + hour: + - 0 + - 6 + - 12 + - 18 + monitored_conditions: + - download + - upload + +# emulated_hue: + # host_ip: 192.168.10.10 + # listen_port: 8300 +# off_maps_to_on_domains: +# - script +# - scene + +# A comma separated list of states that have to be tracked as a single group +# Grouped states should share the same type of states (ON/OFF or HOME/NOT_HOME) +group: + Family: + - device_tracker.carlo + - device_tracker.stacey + - device_tracker.franco + - device_tracker.yolanda + - device_tracker.joyce_ipad + Devices: + - sensor.since_last_boot_templated + - sensor.wii + - sensor.hue_hub + - sensor.tablotv + - sensor.alexa_echo + - sensor.circle + - sensor.rachio + - sensor.skybell + - sensor.samsungtv + Internet: + - sensor.speedtest_download + - sensor.speedtest_upload + Sensors: + - binary_sensor._sensor_2 + - binary_sensor._sensor_3 + - binary_sensor._sensor_5 + - binary_sensor.aeotec_dsb04100_doorwindow_sensor_sensor_4 + - binary_sensor.aeotec_dsb04100_doorwindow_sensor_sensor_6 + Nest: + - climate.downstairs + - climate.upstairs + +switch: + platform: 
rpi_rf + gpio: 17 + switches: + Outlet_Living_Room: + protocol: 1 + pulselength: 186 + code_on: 5265155 + code_off: 5265164 + Outlet_2: + protocol: 1 + pulselength: 186 + code_on: 5264835 + code_off: 5264844 + Outlet_Garage: + protocol: 1 + pulselength: 186 + code_on: 5264691 + code_off: 5264700 + +sensor Devices: + platform: template + sensors: + wii: + friendly_name: 'Wii' + value_template: >- + {%- if is_state("device_tracker.wii", "home") %} + Online + {% else %} + Offline + {%- endif %} + tablotv: + friendly_name: 'Tablo TV' + value_template: >- + {%- if is_state("device_tracker.tablotv", "home") %} + Online + {% else %} + Offline + {%- endif %} + hue_hub: + friendly_name: 'Hue Hub' + value_template: >- + {%- if is_state("device_tracker.hue_hub", "home") %} + Online + {% else %} + Offline + {%- endif %} + alexa_echo: + friendly_name: 'Alexa Echo' + value_template: >- + {%- if is_state("device_tracker.alexa_echo", "home") %} + Online + {% else %} + Offline + {%- endif %} + circle: + friendly_name: 'Disney Circle' + value_template: >- + {%- if is_state("device_tracker.circle", "home") %} + Online + {% else %} + Offline + {%- endif %} + rachio: + friendly_name: 'Rachio Sprinklers' + value_template: >- + {%- if is_state("device_tracker.rachio", "home") %} + Online + {% else %} + Offline + {%- endif %} + skybell: + friendly_name: 'Skybell Doorbell' + value_template: >- + {%- if is_state("device_tracker.skybell", "home") %} + Online + {% else %} + Offline + {%- endif %} + samsungtv: + friendly_name: 'Samsung TV' + value_template: >- + {%- if is_state("device_tracker.samsungtv", "home") %} + Online + {% else %} + Offline + {%- endif %} + + +sensor Login_Failures: + platform: command_line + command: "grep -c invalidpassword /home/hass/.homeassistant/home-assistant.log" + name: badlogin + +sensor: +- platform: systemmonitor + resources: + # - type: last_boot + - type: since_last_boot + +- platform: template + sensors: + since_last_boot_templated: + value_template: >- + 
{%- set slb = states.sensor.since_last_boot.state.split(' ') -%} + {%- set count = slb | length -%} + {%- set hms = slb[count - 1] -%} + {%- set hms_trimmed = hms.split('.')[0] -%} + {%- set hms_split = hms_trimmed.split(':') -%} + {%- set hours = hms_split[0] | int -%} + {%- set minutes = hms_split[1] | int -%} + {%- set seconds = hms_split[2] | int -%} + + {%- if count == 3 -%} + {{ slb[0] ~ ' ' ~ slb[1] ~ ' ' }} + {%- endif -%} + {%- if hours > 0 -%} + {%- if hours == 1 -%} + 1 hour + {%- else -%} + {{ hours }} hours + {%- endif -%} + {%- endif -%} + {%- if minutes > 0 -%} + {%- if hours > 0 -%} + {{ ', ' }} + {%- endif -%} + {%- if minutes == 1 -%} + 1 minute + {%- else -%} + {{ minutes }} minutes + {%- endif -%} + {%- endif -%} + {%- if seconds > 0 -%} + {%- if hours > 0 or minutes > 0 -%} + {{ ', ' }} + {%- endif -%} + {%- if seconds == 1 -%} + 1 second + {%- else -%} + {{ seconds }} seconds + {%- endif -%} + {%- endif -%} + +automation: +- alias: Heal ZWave Nightly + trigger: + platform: time + after: '2:31:00' + action: + service: zwave.heal_network + +- alias: "Update Available Notification" + trigger: + platform: state + entity_id: updater.updater + action: + service: ifttt.trigger + data: {"event":"device_status", "value1":"Home Assistant Update: ", "value2":"Available"} + +- alias: Login Failure + trigger: + platform: numeric_state + entity_id: sensor.badlogin + above: 1 + action: + service: ifttt.trigger + data: {"event":"device_status", "value1":"Home Assistant Error: ", "value2":"Login Failure Detected"} + +- alias: 'Device Status' +#This recipe sends a POST to IFTTT Maker channel. IFTTT then sends me a SMS Text with "device_status : Wii is Offline." 
+ trigger: + - platform: state + entity_id: + - sensor.wii + - sensor.tablotv + - sensor.hue_hub + - sensor.alexa_echo + - sensor.rachio + - sensor.circle + - sensor.skybell + action: + service: ifttt.trigger + data_template: {"event":"device_status", "value1":"{{ trigger.entity_id.split('.')[1] }}", "value2":"{{ trigger.to_state.state }}"} + +- alias: Startup Notification + trigger: + platform: event + event_type: homeassistant_start + action: + service: ifttt.trigger + data: {"event":"device_status", "value1":"Home Assistant", "value2":"Up and Running"} + +- alias: 'GoodNight - Away Mode' +# There is also an IFTTT recipe that shuts down all lights when Nest goes into Away mode. - event_type= Good_Night + trigger: + - platform: state + entity_id: group.family + state: 'not_home' + - platform: event + event_type: good_night + + condition: + condition: state + entity_id: group.family + state: not_home + + action: + service: light.turn_off + entity_id: group.all_lights + +- alias: ZWave Enerwave Door Sensors Open + trigger: + platform: event + event_type: zwave.node_event + event_data: + object_id: enerwave_unknown_type0601_id0903_2 + basic_level: 255 + action: + service: light.turn_off + entity_id: light.office_lamp + +- alias: ZWave Enerwave Door Sensors Closed + trigger: + platform: event + event_type: zwave.node_event + event_data: + object_id: enerwave_unknown_type0601_id0903_2 + basic_level: 0 + action: + service: light.turn_on + entity_id: light.office_lamp + +- alias: TV Time + trigger: + - platform: sun + event: sunset + offset: '+00:30:00' + - platform: event + event_type: tv_time + + condition: + condition: state + entity_id: group.family + state: home + + action: + service: scene.turn_on + entity_id: scene.living_room_tv_time + +scene: + - name: Living Room TV Time + entities: + light.dinette_light_1: + state: off + transition: 10 + light.dinette_light_2: + state: off + transition: 10 + light.living_room_front_left: + state: off + transition: 10 + 
light.living_room_front_right: + state: off + transition: 10 + light.living_room_slider: + state: off + transition: 10 + light.living_room_back_right: + state: on + transition: 10 + brightness: 1 + light.living_room_back_left: + state: on + transition: 40 + brightness: 1 + light.living_room_couch_1: + state: on + transition: 400 + xy_color: [0.6621,0.3023] + brightness: 255 + light.living_room_couch_2: + state: on + transition: 400 + xy_color: [0.6621,0.3023] + brightness: 255 + light.couch_tv_light: + state: on + transition: 400 + xy_color: [0.6621,0.3023] + brightness: 100 + + +# Restart Homebridge on HASS start +# shell_command: + # restart_homebridge: 'sudo su pi -c "pm2 restart homebridge"' + # start_homebridge: 'sudo su pi -c "pm2 start homebridge"' + # stop_homebridge: 'sudo su pi -c "pm2 stop homebridge"' + + ### Future Ideas + +# - alias: 'Get Random Time' + # trigger: + # platform: time + # after: '21:00:00' + # action: + # - service: input_slider.select_value + # data_template: + # entity_id: input_slider.hour + # value: '{{ (range(22, 23) | random) }}' + # - service: input_slider.select_value + # data_template: + # entity_id: input_slider.random_minute + # value: '{{ (range(30, 45) | random) }}' +# Then simply use that in your light turn off automation: + +# - alias: 'Turn lights off' + # trigger: + # platform: template + # value_template: '{{ now.hour == (states.input_slider.random_hour.state | int) and now.minute == (states.input_slider.random_minute.state | int) }}' + # action: + # - service: light.turn_off + # data: + # entity_id: light.hue_color_lamp_1 + + +#Todo List + +# AUTOMATE LAMP UPSTAIRS USING ifttt AND Nest thermostat. +# Put Dash Buttons out there. 
+# Put door sensor on garage door \ No newline at end of file diff --git a/deps/.DS_Store b/deps/.DS_Store new file mode 100644 index 00000000..9104dc9e Binary files /dev/null and b/deps/.DS_Store differ diff --git a/deps/CherryPy-8.1.0.dist-info/DESCRIPTION.rst b/deps/CherryPy-8.1.0.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..0e09c455 --- /dev/null +++ b/deps/CherryPy-8.1.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,3 @@ +CherryPy is a pythonic, object-oriented HTTP framework + + diff --git a/deps/CherryPy-8.1.0.dist-info/INSTALLER b/deps/CherryPy-8.1.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/CherryPy-8.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/CherryPy-8.1.0.dist-info/METADATA b/deps/CherryPy-8.1.0.dist-info/METADATA new file mode 100644 index 00000000..dddf27ef --- /dev/null +++ b/deps/CherryPy-8.1.0.dist-info/METADATA @@ -0,0 +1,44 @@ +Metadata-Version: 2.0 +Name: CherryPy +Version: 8.1.0 +Summary: Object-Oriented HTTP framework +Home-page: http://www.cherrypy.org +Author: CherryPy Team +Author-email: team@cherrypy.org +License: BSD +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: Freely Distributable +Classifier: Operating System :: OS Independent +Classifier: Framework :: CherryPy +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: 
Programming Language :: Python :: Implementation +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: Jython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Dist: six +Provides-Extra: This section defines feature flags end-users can use in dependenciesmemcached-session +Requires-Dist: python-memcached (>=1.58); extra == 'This section defines feature flags end-users can use in dependenciesmemcached-session' + +CherryPy is a pythonic, object-oriented HTTP framework + + diff --git a/deps/CherryPy-8.1.0.dist-info/RECORD b/deps/CherryPy-8.1.0.dist-info/RECORD new file mode 100644 index 00000000..a2ba793c --- /dev/null +++ b/deps/CherryPy-8.1.0.dist-info/RECORD @@ -0,0 +1,242 @@ +../../cherrypy/LICENSE.txt,sha256=IauyrgxBFcHp6BdR1ETnWowNhkQ8WGiakDvwZn4uMaw,1582 +../../cherrypy/cherryd,sha256=NHP2aVxBaufEZ-xOtagQyikGYospC2E53zelFcQG4l8,101 +../../cherrypy/favicon.ico,sha256=jrNK5SnKfbnSFgX_xQyLX3khmeImw8IbbHgJVIsGci0,1406 +../../cherrypy/scaffold/example.conf,sha256=UXZHQHu71WKWzCei-Olo1gUreX8AcoW5exkUSi3bVaI,61 +../../cherrypy/scaffold/site.conf,sha256=pjUhF-ir1xzSsV7LqXGfyR6Ns_r_n3ATWw8OlfbgT3w,426 +../../cherrypy/scaffold/static/made_with_cherrypy_small.png,sha256=-A_xqPsK-9k4-36Uy1Uw4s7ufbs9cm30gQ8fb90JscA,7455 +../../cherrypy/test/style.css,sha256=2Ypw_ziOWlY4dTZJlwsrorDLqLA1z485lgagaGemtKQ,17 +../../cherrypy/test/test.pem,sha256=x6LrLPw2dBRyZwHXk6FhdSDNM3-Cv7DBXc8o4A19RhI,2254 
+../../cherrypy/test/static/dirback.jpg,sha256=naVXoM4eMycBnz1jxqlMLFVWkM9-DgFQRIYiCvjrCRo,18238 +../../cherrypy/test/static/index.html,sha256=cB6ALrLhcxEGyMNgOHzmnBvmRnPCW_u3ebZUqdCiHkQ,14 +../../cherrypy/tutorial/README.txt,sha256=6S48iS53EwxP16p7ttB0oDE0H6T_31WVhl5TQOnupuM,703 +../../cherrypy/tutorial/custom_error.html,sha256=9cMEb83zwct9i-fJlyl7yvBSNexF7yEIWOoxH8lpllQ,404 +../../cherrypy/tutorial/pdf_file.pdf,sha256=_4ED6K9omlqDodRtP1G3oagWwF4vqQbT1yjeA2UASgw,85698 +../../cherrypy/tutorial/tutorial.conf,sha256=9ENgfRDyopHuignr_aHeMaWoC562xThbmlgF6zg4oEE,96 +../../bin/cherryd,sha256=yoXgNxNxnyqQb35YPan1JBakxaktUYlclUaIsHsjOWA,87 +CherryPy-8.1.0.dist-info/DESCRIPTION.rst,sha256=-2ZR2c39k2rBpsSnTMQh6aAYchh9RnvriVjF0BmLbOQ,57 +CherryPy-8.1.0.dist-info/METADATA,sha256=yDpbX_a4VGrpOQM5-xdeQ8nVwx0shZbUCYXMoOAmKaE,2028 +CherryPy-8.1.0.dist-info/RECORD,, +CherryPy-8.1.0.dist-info/WHEEL,sha256=rNo05PbNqwnXiIHFsYm0m22u4Zm6YJtugFG2THx4w3g,92 +CherryPy-8.1.0.dist-info/metadata.json,sha256=pFeuWbZb0Q-cH1Q1w9Z9guuvR6ssDsrUWrQgHRgDxSc,1989 +CherryPy-8.1.0.dist-info/top_level.txt,sha256=mOBE-r7Ej1kFrXNKlOj3yY9QfGA6Xkz6vZK7VNJF3YE,9 +cherrypy/__init__.py,sha256=gFcXKfeonIhWgYdgTn2pzYkzYCy2EHuRjqIrKiAkAjw,11891 +cherrypy/__main__.py,sha256=RC6ZmBSxqBfas6Y_BBYduvoTM933wRjqW5g24awvUcg,77 +cherrypy/_cpchecker.py,sha256=EEVtXoJLtCgzh6N1xjX28MpTlvHwWlfK8hKscEPrp2Y,15034 +cherrypy/_cpcompat.py,sha256=as8wX_SQ8y95PNellWx2EuUTXhNb3ddM_qG5CYxfe0o,9716 +cherrypy/_cpconfig.py,sha256=77jqoLwfGIUwitNvCwrMb4ory0TbicTN7PwMbDEwYdE,10048 +cherrypy/_cpdispatch.py,sha256=4X7qD6ujNWZFJQp9fRnur12CZUt0-TjHtpF_9JRiyF4,25303 +cherrypy/_cperror.py,sha256=7eqrhVsyurxGPLh-PcKj187zkccJ6d-7qLYYo8SYCZk,22963 +cherrypy/_cplogging.py,sha256=DJRDGJt1XZH9eSwPud2b30qdLyPwD5-VYMmbcFR47jg,17211 +cherrypy/_cpmodpy.py,sha256=M2RpPP4MUSVT1n5GXsRi4yjsDB4AR-IOPJfp1W8x3fE,11158 +cherrypy/_cpnative_server.py,sha256=919Jn2tGn2kpZhE0zU_S00oqcAnUeu1dey1UJJ9FOZA,5974 
+cherrypy/_cpreqbody.py,sha256=LbcCYkIp3VZeuzywySGtTLtlxv9ktI2BEN9VTb06xAE,37427 +cherrypy/_cprequest.py,sha256=BXWubSLvtvRyTTLpj-IhoYl5-d_7fMMfmBnooGxHHvw,37198 +cherrypy/_cpserver.py,sha256=TJ4KXCBeJBfXc5_kte1JkEutHKCIZnP707dtdu3FgGY,8277 +cherrypy/_cpthreadinglocal.py,sha256=kZGe956xtkv9Gp6bkZxYuqUy_MY4WCGSuLHFFWkXWGE,6619 +cherrypy/_cptools.py,sha256=RbMUwU13wdZoQB-R4KqsqaHfoo3e3YmE0nT3fO1Q17s,19426 +cherrypy/_cptree.py,sha256=sbkOdp4uLIjEQrZeCC0wpHV3F-LV2uu7uRMRfDBlZzo,10394 +cherrypy/_cpwsgi.py,sha256=bMHcT8USUtY8L5ReaNhvk2tqqS1o3h8nBngfcDNOZVE,16864 +cherrypy/_cpwsgi_server.py,sha256=qcsgvqm2FjHOzu1N7DSzMk16LlseAM2WFPFVunbW_nA,3023 +cherrypy/_helper.py,sha256=N0lfUrPU1RdfxXIYDpHccZ2LQAM3rNiJnrdxWDM7MSU,10338 +cherrypy/daemon.py,sha256=dqtbnWyMZUrbvLfZItQT5aPyAAWOqmfv6K0_Q_VzSco,3913 +cherrypy/lib/__init__.py,sha256=WeVRLlnimPVlecYFaOawcnlpPYVrkucteVKeHUoBYvY,2393 +cherrypy/lib/auth.py,sha256=WA8TpNvWSoPAL3din7TVceR41uHJj6ZuaryNUEl081c,3224 +cherrypy/lib/auth_basic.py,sha256=vYLRPP97S9jkNVGDeKDyU2lKt8yh-WoizcZu_QlQ_BY,3401 +cherrypy/lib/auth_digest.py,sha256=mNRIiN9-QIuGVWqIQC-cUpbVWmdXdeyMASLaYsCGrR0,14178 +cherrypy/lib/caching.py,sha256=DWovFBAwBmPPAi9jIevTEhvdrAz43uKoW27m7mkdTOI,17147 +cherrypy/lib/covercp.py,sha256=LST3y1SNuVT9v14WMSIAGYerwFxAkVy65SxW9FRm1vI,11598 +cherrypy/lib/cpstats.py,sha256=8mM-x5n3l604kLOHsm7bd3RtTtJaoeuYQo5twQoKG9A,23042 +cherrypy/lib/cptools.py,sha256=o_dyD3_RKqy_Gp1zRZktmPi_HiGYqsanwWmMybduxxk,23756 +cherrypy/lib/encoding.py,sha256=Bydtgx6mAtQzWD_9zEP90-fWUznd6-NllxoWmntJcg0,16240 +cherrypy/lib/gctools.py,sha256=Rxk7SHrHuE1TMoJh48Q5NOmSmc_p8oPeVbuWWfmhayo,7344 +cherrypy/lib/httpauth.py,sha256=qcah2nyHpBshXgwrcpf3FErZbpBp1L3hziYKbV5VlKk,13050 +cherrypy/lib/httputil.py,sha256=noiOj5nW17Ol0RNumkcNO4fIzBw7tDd2Is-fmvIKpsg,17603 +cherrypy/lib/jsontools.py,sha256=Jk79XKP9HZavy7LiEEUXKyg8UYywTnE5c7pvxhb6MLA,3924 +cherrypy/lib/lockfile.py,sha256=xEt_WOr1y7fc2eG2Fj_yAzyRYJi6gUXPNzZpHVfEsSI,3448 
+cherrypy/lib/locking.py,sha256=H80gHcrHTret5i_29c7ISzf3_GyaNw2QsicDQbwo2iE,1224 +cherrypy/lib/profiler.py,sha256=GsRoaZW_syTg4xlHZjNd_Od5xppoZRvDAAlkenxSDB8,6489 +cherrypy/lib/reprconf.py,sha256=x9iYaaEXqrj-fqNSCJ5HDyyiPZqEq3S6va2GFrSFeAM,16186 +cherrypy/lib/sessions.py,sha256=QHpfijremktJYm7MIm5UmjXe6TUGFoS62RW7dU8v0hw,30438 +cherrypy/lib/static.py,sha256=8dc32u1c5JJ82WLG1iinS3hpxfM2QMln6Grv7Oa88E8,14960 +cherrypy/lib/xmlrpcutil.py,sha256=QyCVvWReaqs6ZcnqgPfuLIYQyMUfWI2k2w5PMnPxUCw,1608 +cherrypy/process/__init__.py,sha256=_kdPC_Wa66SIY6S4L14mZXSUFHPJK2q79bp6vyS9udk,552 +cherrypy/process/plugins.py,sha256=CXQE3fvNM1d5vehVOH1i6se6PblXqo48JSn-LdFHmmY,26654 +cherrypy/process/servers.py,sha256=9UyrJ___5fJfyoxoqEpGf8p_vnGPtm0B_VWpr8xJbDA,15727 +cherrypy/process/win32.py,sha256=VIilNKDJrGuF4v1u5Ki8soLarKUGiHUdhVI5f-70QBs,5772 +cherrypy/process/wspbus.py,sha256=JgrVeCzKq0MNajC9xW3zoyCHbZsTJrWO6ncZ7VTviT0,17156 +cherrypy/scaffold/__init__.py,sha256=XIb5M6FokgALqxAbrH2vxquWpKJs1xm4Q85ptJ6XK20,1762 +cherrypy/test/__init__.py,sha256=7vswU9gMLQpcAP01bgFC4TCmgfyUe5Z6cGW2POImxeM,692 +cherrypy/test/_test_decorators.py,sha256=HxU9-DiUdrhfzxa2XOZQV2b5suCI6e18hqs15QBXhh4,992 +cherrypy/test/_test_states_demo.py,sha256=8GC746zLbqPQCj6AplYSCa_kCkDDPyh3Tvuwi-Tk3jM,1925 +cherrypy/test/benchmark.py,sha256=S7khs-SMv3KrD6e3LtJplxU4ROp5E_Kf9kfHnputr6E,12536 +cherrypy/test/checkerdemo.py,sha256=VpLy7yTyqt8n3NErv4vFfn7q5xrhSghq0x3AzYT0VIY,1860 +cherrypy/test/helper.py,sha256=E4DilAaMRaVO8ghNBD_Jcm8rFtjPTiRFkVW1lhM8pww,17573 +cherrypy/test/logtest.py,sha256=AnFPL11lFnSlx82xEzMVedalTVO_-wrmRrX06AX83XU,7097 +cherrypy/test/modfastcgi.py,sha256=2T3R9FCcsHVcssHfl3zRon_sQjpEEv_aRR-ZUy8Q-O4,4651 +cherrypy/test/modfcgid.py,sha256=rX57myEw4HmRnSmeiYWnn9M4Gwp1DUie8NGyRzFHoBc,4236 +cherrypy/test/modpy.py,sha256=0UxLfvq81J-0awar-jTZNV1mUv0wFOlWK7A-MI4IkQo,5009 +cherrypy/test/modwsgi.py,sha256=COV2DMPmlLtpQ4TGIXLqy4bAqtHbonhhNrffXBScybI,4832 
+cherrypy/test/sessiondemo.py,sha256=GJCcNPb_-a_1dush4vGOotP7WjfPwweA4kJ6LWzyqio,5447 +cherrypy/test/test_auth_basic.py,sha256=SCznMKX1dZaz_az_vs2381WV9vsYzRGJ-JgEBL8vyqg,3039 +cherrypy/test/test_auth_digest.py,sha256=xipRY65noqfIuSeayk33xOZHCjhkmbH_ajtzhXnHZSE,5066 +cherrypy/test/test_bus.py,sha256=sZmIGf693GPTqQFzNp4R4qLBU5iJa9GrUnicR9zA2hQ,9007 +cherrypy/test/test_caching.py,sha256=IqiwMX3vlP6GPxlBcAKGWTkiOP59KKgNuQzpz-LqXb0,12587 +cherrypy/test/test_compat.py,sha256=CEAwfFJjK6bXDnRmGlj-vhWeHH4Nl63YRZ8yhRuIZ68,474 +cherrypy/test/test_config.py,sha256=1lcGqzylyBma8Itt3yML2XcZWlpeI3sOAR2g9XAEBiU,9179 +cherrypy/test/test_config_server.py,sha256=V26tL8mpRmzG2ZosatDGiRzo47vp4MbbW6toIQzaZLI,4036 +cherrypy/test/test_conn.py,sha256=CJK0RXKG2dw4ai9i29jzY-J0Sc9eHVax6L3MTTE425Y,30953 +cherrypy/test/test_core.py,sha256=aZeMYYyl3YYfQWqWBAwgMk1Hp2pkz_izHtwVE9BWL9M,28039 +cherrypy/test/test_dynamicobjectmapping.py,sha256=yTqw_mrm8Jzzsa1UCEUfZ-V5_gI4Up9vwdhgcwhmld8,12393 +cherrypy/test/test_encoding.py,sha256=3Np9WtdKUNfLl6kIePeHlJrRCAeWkRv0QwCBJAnTo_4,16647 +cherrypy/test/test_etags.py,sha256=VEa5oA1zD3PqYKfLzFYXNHtnb0IzXOqOlY-AGXMp_Sg,3093 +cherrypy/test/test_http.py,sha256=b3bdpK49xufkwCVQFtAvZ4-HATDAlRIDpM9sucx3Jr8,9933 +cherrypy/test/test_httpauth.py,sha256=1fKirgAkRcrluHzmWCrsr70RoHpaa6bCIIkiVODGuZI,6313 +cherrypy/test/test_httplib.py,sha256=LHTbWmiglY1qW_40I_pLVcrmqeOxiq2ww6nTb9anTKU,1287 +cherrypy/test/test_iterator.py,sha256=oi3Ja8BCNMC6s7cIYK1bbQLXZvmriwG2yR-vjERIYk0,5709 +cherrypy/test/test_json.py,sha256=8-r2dIvQ_6IHADT6bA4pGMw6-unRWg998AXIKrNmcQs,2864 +cherrypy/test/test_logging.py,sha256=4leoBQTGz7_75oHzch5vBwLB4jDu_GcGDypIMlSIzXM,6053 +cherrypy/test/test_mime.py,sha256=37gS3iX0PcHLdm-EuPO01ZkyeFwlwiRsnbin9JpVnbw,4602 +cherrypy/test/test_misc_tools.py,sha256=AbeWO7rznVs839Jhp6VWBDuzFkvgbZ7xAP8CZQcD_W4,7453 +cherrypy/test/test_objectmapping.py,sha256=qiP1zq2BmG00HEAYpJzEsP9aZ-7KO0TnIa4sP-Tw20k,14521 
+cherrypy/test/test_params.py,sha256=M17ptv_JX0yQAwgE98cCwhXBZhllh1zCPgPQo_x_Cu8,1819 +cherrypy/test/test_proxy.py,sha256=qb1Tp5cQfidZXgoDJ5DJEQGktvjJdFdwOmxhIwYsf2g,5062 +cherrypy/test/test_refleaks.py,sha256=1qsVAJ4AOB8ma7BWpY9siGwpTUXG_b-CO-8K_udNpmg,1535 +cherrypy/test/test_request_obj.py,sha256=gX2TwPC1pCfFZfb8Tv49g9dXWtK1XOuuQcv_UBK53Bk,32339 +cherrypy/test/test_routes.py,sha256=63-JJS5PSwamgXHat1QKkuNKLINaLAvZc3hM8F4hn4M,2366 +cherrypy/test/test_session.py,sha256=jWUj6Xm_UxRUNKIZe3afqCLzc3AofJzKpsfZjrevysU,17150 +cherrypy/test/test_sessionauthenticate.py,sha256=Jcp9mjXMiAhMjS8NKEKtkkH0bFm6LTS6zUGHc31-hnM,2013 +cherrypy/test/test_states.py,sha256=9Iz-lVx27Sj0oXo3LlmvLXz5EwoLhfMZvhJybqRSWTY,19091 +cherrypy/test/test_static.py,sha256=rBbKTUrKIB7VBoLeTCXzfeGEaDb-85ky9h8G0kJT-zM,14051 +cherrypy/test/test_tools.py,sha256=gi-N2WYNkXvv57VaYH8XVo0IyRbQFHCcO-4Fq6h0GZU,16899 +cherrypy/test/test_tutorials.py,sha256=jq9-AMO0UicQ35IMERyrg22uYCR-oQhXPq8BUlpRzK4,7239 +cherrypy/test/test_virtualhost.py,sha256=WDz1rD69PwRiGh6xNResy36o_ILDgBFlNZAcVyR5l_c,4020 +cherrypy/test/test_wsgi_ns.py,sha256=2SLVbU4Fe1Znt1AC561fh06BM-FGpZGRcEPa3v4eARU,2868 +cherrypy/test/test_wsgi_unix_socket.py,sha256=nkCl6LEohAI186awz65weREZ0o9BgKRdpDMaTVsMv6A,2635 +cherrypy/test/test_wsgi_vhost.py,sha256=Wsi0bibrBUhJBOkPzuxy4-SELbMLpk7Hl6P_oND8Xfo,1034 +cherrypy/test/test_wsgiapps.py,sha256=IatsMtVjrsHPBuM4e5nAGMZcdcs2sxJSxyDN_xJ6Usg,4190 +cherrypy/test/test_xmlrpc.py,sha256=2fz5sFBwkW2HRPsrcxJ7gkTRrMJzQtG71ZhR-0ukPrk,5830 +cherrypy/test/webtest.py,sha256=_JjzK2J8dLdIlvQwdh1rBMCIkUJpDgfMpiHTMKRGqgo,20399 +cherrypy/tutorial/__init__.py,sha256=cmLXfvQI0L6trCXwDzR0WE1bu4JZYt301HJRNhjZOBc,85 +cherrypy/tutorial/tut01_helloworld.py,sha256=MRt9GwP9IoUcXOG4Cq1-Asge1ToX1kvTjpLY5U-5VyI,1014 +cherrypy/tutorial/tut02_expose_methods.py,sha256=kYSTvBRXBDE-uQekMW66rVTRaVC2rS3UySyEi_KSNS8,799 +cherrypy/tutorial/tut03_get_and_post.py,sha256=VTXZfGr7KRzo-NXG713RsHh4FEqxjoS3agzoy15Ksuo,1586 
+cherrypy/tutorial/tut04_complex_site.py,sha256=TZSRXINZVJIgTPnWtH4uunwjROZIid0QWLxNcpoySM4,2947 +cherrypy/tutorial/tut05_derived_objects.py,sha256=N9w8D2ohznvBbPdWb5OhkQLcidDCiLuMBMtJIX8ofp0,2140 +cherrypy/tutorial/tut06_default_method.py,sha256=AcIdYCpd08czHL7VdeCI_k4t7dALobxledk0ss8RHPk,2263 +cherrypy/tutorial/tut07_sessions.py,sha256=jSoXprvwLUxvR7QiuyCD0bHLDVYRlJk4mpNWTYnqrWU,1227 +cherrypy/tutorial/tut08_generators_and_yield.py,sha256=GWIWPhkQ9v5v6ReNjoZhPvnoK6H0_mUw1vut87WWoHE,1287 +cherrypy/tutorial/tut09_files.py,sha256=tTk_CCgQrv8xWr-L-YgUx5mQD6HELYvfJ3uBhtY_wIQ,3462 +cherrypy/tutorial/tut10_http_errors.py,sha256=NWCe8DJrNLRghNUdTRB1Ge_2dBvgYBCyv8QGNzpfHw4,2705 +cherrypy/wsgiserver/__init__.py,sha256=Hf2IefJVSLv45C8j7-JwrQS_CGP0_NTAZQ3WdXMQTmI,92609 +cherrypy/wsgiserver/ssl_builtin.py,sha256=MCCgEbSg1SVmi4AaYyghoSBqT-q99YxZcK9M5_D6D8A,3833 +CherryPy-8.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cherrypy/test/__pycache__/benchmark.cpython-34.pyc,, +cherrypy/test/__pycache__/test_json.cpython-34.pyc,, +cherrypy/test/__pycache__/test_config_server.cpython-34.pyc,, +cherrypy/test/__pycache__/test_httpauth.cpython-34.pyc,, +cherrypy/test/__pycache__/test_misc_tools.cpython-34.pyc,, +cherrypy/test/__pycache__/helper.cpython-34.pyc,, +cherrypy/lib/__pycache__/reprconf.cpython-34.pyc,, +cherrypy/test/__pycache__/test_auth_basic.cpython-34.pyc,, +cherrypy/lib/__pycache__/auth_digest.cpython-34.pyc,, +cherrypy/__pycache__/__main__.cpython-34.pyc,, +cherrypy/process/__pycache__/plugins.cpython-34.pyc,, +cherrypy/__pycache__/_helper.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut03_get_and_post.cpython-34.pyc,, +cherrypy/lib/__pycache__/jsontools.cpython-34.pyc,, +cherrypy/test/__pycache__/test_conn.cpython-34.pyc,, +cherrypy/__pycache__/_cptools.cpython-34.pyc,, +cherrypy/__pycache__/daemon.cpython-34.pyc,, +cherrypy/wsgiserver/__pycache__/ssl_builtin.cpython-34.pyc,, +cherrypy/test/__pycache__/test_xmlrpc.cpython-34.pyc,, 
+cherrypy/test/__pycache__/sessiondemo.cpython-34.pyc,, +cherrypy/test/__pycache__/test_caching.cpython-34.pyc,, +cherrypy/__pycache__/_cptree.cpython-34.pyc,, +cherrypy/__pycache__/_cpreqbody.cpython-34.pyc,, +cherrypy/lib/__pycache__/static.cpython-34.pyc,, +cherrypy/lib/__pycache__/lockfile.cpython-34.pyc,, +cherrypy/__pycache__/_cplogging.cpython-34.pyc,, +cherrypy/__pycache__/_cprequest.cpython-34.pyc,, +cherrypy/test/__pycache__/test_proxy.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut02_expose_methods.cpython-34.pyc,, +cherrypy/process/__pycache__/servers.cpython-34.pyc,, +cherrypy/test/__pycache__/test_auth_digest.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut08_generators_and_yield.cpython-34.pyc,, +cherrypy/test/__pycache__/modpy.cpython-34.pyc,, +cherrypy/lib/__pycache__/locking.cpython-34.pyc,, +cherrypy/test/__pycache__/logtest.cpython-34.pyc,, +cherrypy/__pycache__/_cperror.cpython-34.pyc,, +cherrypy/test/__pycache__/_test_states_demo.cpython-34.pyc,, +cherrypy/test/__pycache__/test_mime.cpython-34.pyc,, +cherrypy/test/__pycache__/test_core.cpython-34.pyc,, +cherrypy/test/__pycache__/test_objectmapping.cpython-34.pyc,, +cherrypy/scaffold/__pycache__/__init__.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut10_http_errors.cpython-34.pyc,, +cherrypy/test/__pycache__/modfastcgi.cpython-34.pyc,, +cherrypy/test/__pycache__/test_dynamicobjectmapping.cpython-34.pyc,, +cherrypy/test/__pycache__/test_states.cpython-34.pyc,, +cherrypy/__pycache__/_cpnative_server.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut07_sessions.cpython-34.pyc,, +cherrypy/test/__pycache__/test_params.cpython-34.pyc,, +cherrypy/test/__pycache__/test_tutorials.cpython-34.pyc,, +cherrypy/test/__pycache__/test_session.cpython-34.pyc,, +cherrypy/test/__pycache__/test_tools.cpython-34.pyc,, +cherrypy/lib/__pycache__/auth.cpython-34.pyc,, +cherrypy/lib/__pycache__/sessions.cpython-34.pyc,, +cherrypy/test/__pycache__/test_wsgi_ns.cpython-34.pyc,, 
+cherrypy/__pycache__/_cpmodpy.cpython-34.pyc,, +cherrypy/process/__pycache__/wspbus.cpython-34.pyc,, +cherrypy/lib/__pycache__/httputil.cpython-34.pyc,, +cherrypy/test/__pycache__/test_httplib.cpython-34.pyc,, +cherrypy/test/__pycache__/test_static.cpython-34.pyc,, +cherrypy/lib/__pycache__/xmlrpcutil.cpython-34.pyc,, +cherrypy/test/__pycache__/test_config.cpython-34.pyc,, +cherrypy/test/__pycache__/test_routes.cpython-34.pyc,, +cherrypy/__pycache__/_cpwsgi_server.cpython-34.pyc,, +cherrypy/__pycache__/_cpconfig.cpython-34.pyc,, +cherrypy/test/__pycache__/test_logging.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut05_derived_objects.cpython-34.pyc,, +cherrypy/test/__pycache__/checkerdemo.cpython-34.pyc,, +cherrypy/__pycache__/_cpcompat.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut06_default_method.cpython-34.pyc,, +cherrypy/test/__pycache__/test_encoding.cpython-34.pyc,, +cherrypy/test/__pycache__/test_wsgi_unix_socket.cpython-34.pyc,, +cherrypy/test/__pycache__/modfcgid.cpython-34.pyc,, +cherrypy/test/__pycache__/test_request_obj.cpython-34.pyc,, +cherrypy/test/__pycache__/test_bus.cpython-34.pyc,, +cherrypy/lib/__pycache__/auth_basic.cpython-34.pyc,, +cherrypy/lib/__pycache__/covercp.cpython-34.pyc,, +cherrypy/test/__pycache__/test_refleaks.cpython-34.pyc,, +cherrypy/wsgiserver/__pycache__/__init__.cpython-34.pyc,, +cherrypy/test/__pycache__/test_wsgi_vhost.cpython-34.pyc,, +cherrypy/test/__pycache__/modwsgi.cpython-34.pyc,, +cherrypy/test/__pycache__/test_etags.cpython-34.pyc,, +cherrypy/test/__pycache__/test_compat.cpython-34.pyc,, +cherrypy/test/__pycache__/test_wsgiapps.cpython-34.pyc,, +cherrypy/test/__pycache__/test_http.cpython-34.pyc,, +cherrypy/lib/__pycache__/gctools.cpython-34.pyc,, +cherrypy/lib/__pycache__/encoding.cpython-34.pyc,, +cherrypy/__pycache__/_cpwsgi.cpython-34.pyc,, +cherrypy/test/__pycache__/test_iterator.cpython-34.pyc,, +cherrypy/test/__pycache__/__init__.cpython-34.pyc,, +cherrypy/lib/__pycache__/profiler.cpython-34.pyc,, 
+cherrypy/lib/__pycache__/cpstats.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut09_files.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut04_complex_site.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/tut01_helloworld.cpython-34.pyc,, +cherrypy/__pycache__/__init__.cpython-34.pyc,, +cherrypy/process/__pycache__/win32.cpython-34.pyc,, +cherrypy/__pycache__/_cpthreadinglocal.cpython-34.pyc,, +cherrypy/tutorial/__pycache__/__init__.cpython-34.pyc,, +cherrypy/lib/__pycache__/cptools.cpython-34.pyc,, +cherrypy/test/__pycache__/test_sessionauthenticate.cpython-34.pyc,, +cherrypy/test/__pycache__/_test_decorators.cpython-34.pyc,, +cherrypy/__pycache__/_cpdispatch.cpython-34.pyc,, +cherrypy/__pycache__/_cpchecker.cpython-34.pyc,, +cherrypy/lib/__pycache__/httpauth.cpython-34.pyc,, +cherrypy/lib/__pycache__/__init__.cpython-34.pyc,, +cherrypy/test/__pycache__/webtest.cpython-34.pyc,, +cherrypy/lib/__pycache__/caching.cpython-34.pyc,, +cherrypy/process/__pycache__/__init__.cpython-34.pyc,, +cherrypy/__pycache__/_cpserver.cpython-34.pyc,, +cherrypy/test/__pycache__/test_virtualhost.cpython-34.pyc,, diff --git a/deps/CherryPy-8.1.0.dist-info/WHEEL b/deps/CherryPy-8.1.0.dist-info/WHEEL new file mode 100644 index 00000000..bb7f7dba --- /dev/null +++ b/deps/CherryPy-8.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/deps/CherryPy-8.1.0.dist-info/metadata.json b/deps/CherryPy-8.1.0.dist-info/metadata.json new file mode 100644 index 00000000..8844781f --- /dev/null +++ b/deps/CherryPy-8.1.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "License :: Freely Distributable", "Operating System :: OS Independent", "Framework :: CherryPy", "License :: OSI Approved :: BSD License", "Programming Language :: Python", "Programming Language :: Python :: 2", 
"Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Internet :: WWW/HTTP :: HTTP Servers", "Topic :: Internet :: WWW/HTTP :: WSGI", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", "Topic :: Internet :: WWW/HTTP :: WSGI :: Server", "Topic :: Software Development :: Libraries :: Application Frameworks"], "extensions": {"python.details": {"contacts": [{"email": "team@cherrypy.org", "name": "CherryPy Team", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://www.cherrypy.org"}}}, "extras": ["This section defines feature flags end-users can use in dependenciesmemcached-session"], "generator": "bdist_wheel (0.29.0)", "license": "BSD", "metadata_version": "2.0", "name": "CherryPy", "run_requires": [{"extra": "This section defines feature flags end-users can use in dependenciesmemcached-session", "requires": ["python-memcached (>=1.58)"]}, {"requires": ["six"]}], "summary": "Object-Oriented HTTP framework", "test_requires": [{"requires": ["tox"]}], "version": "8.1.0"} \ No newline at end of file diff --git a/deps/CherryPy-8.1.0.dist-info/top_level.txt b/deps/CherryPy-8.1.0.dist-info/top_level.txt new file mode 100644 index 00000000..d7187069 --- /dev/null +++ b/deps/CherryPy-8.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +cherrypy diff --git a/deps/PyChromecast-0.7.4.dist-info/DESCRIPTION.rst 
b/deps/PyChromecast-0.7.4.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..06e83d73 --- /dev/null +++ b/deps/PyChromecast-0.7.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,160 @@ +pychromecast |Build Status| +=========================== + +.. |Build Status| image:: https://travis-ci.org/balloob/pychromecast.svg?branch=master + :target: https://travis-ci.org/balloob/pychromecast + +Library for Python 2 and 3 to communicate with the Google Chromecast. It +currently supports: + +- Auto discovering connected Chromecasts on the network +- Start the default media receiver and play any online media +- Control playback of current playing media +- Implement Google Chromecast api v2 +- Communicate with apps via channels +- Easily extendable to add support for unsupported namespaces +- Multi-room setups with Audio cast devices + +*PyChromecast 0.6 introduces some backward incompatible changes due to +the migration from DIAL to socket for retrieving the app status.* + +Dependencies +------------ + +PyChromecast depends on the Python packages requests, protobuf and +zeroconf. Make sure you have these dependencies installed using +``pip install -r requirements.txt`` + +Some users running Python 2.7 have `reported`_ that they had to upgrade +their version of pip using ``pip install --upgrade pip`` before they +were able to install the latest version of the dependencies. + +.. _reported: https://github.com/balloob/pychromecast/issues/47#issuecomment-107822162 + +How to use +---------- + +.. 
code:: python + + >> from __future__ import print_function + >> import time + >> import pychromecast + + >> pychromecast.get_chromecasts_as_dict().keys() + ['Dev', 'Living Room', 'Den', 'Bedroom'] + + >> cast = pychromecast.get_chromecast(friendly_name="Living Room") + >> # Wait for cast device to be ready + >> cast.wait() + >> print(cast.device) + DeviceStatus(friendly_name='Living Room', model_name='Chromecast', manufacturer='Google Inc.', api_version=(1, 0), uuid=UUID('df6944da-f016-4cb8-97d0-3da2ccaa380b'), cast_type='cast') + + >> print(cast.status) + CastStatus(is_active_input=True, is_stand_by=False, volume_level=1.0, volume_muted=False, app_id=u'CC1AD845', display_name=u'Default Media Receiver', namespaces=[u'urn:x-cast:com.google.cast.player.message', u'urn:x-cast:com.google.cast.media'], session_id=u'CCA39713-9A4F-34A6-A8BF-5D97BE7ECA5C', transport_id=u'web-9', status_text='') + + >> mc = cast.media_controller + >> mc.play_media('http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4', 'video/mp4') + >> print(mc.status) + MediaStatus(current_time=42.458322, content_id=u'http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4', content_type=u'video/mp4', duration=596.474195, stream_type=u'BUFFERED', idle_reason=None, media_session_id=1, playback_rate=1, player_state=u'PLAYING', supported_media_commands=15, volume_level=1, volume_muted=False) + + >> mc.pause() + >> time.sleep(5) + >> mc.play() + +Adding support for extra namespaces +----------------------------------- + +Each app that runs on the Chromecast supports namespaces. They specify a +JSON-based mini-protocol. This is used to communicate between the +Chromecast and your phone/browser and now Python. + +Support for extra namespaces is added by using controllers. To add your own namespace to a current chromecast instance you will first have to define your controller. Example of a minimal controller: + +.. 
code:: python + + from pychromecast.controllers import BaseController + + class MyController(BaseController): + def __init__(self): + super(MyController, self).__init__( + "urn:x-cast:my.super.awesome.namespace") + + def receive_message(self, message, data): + print("Wow, I received this message: {}".format(data)) + + return True # indicate you handled this message + + def request_beer(self): + self.send_message({'request': 'beer'}) + +After you have defined your controller you will have to add an instance to a Chromecast object: `cast.register_handler(MyController())`. When a message is received with your namespace it will be routed to your controller. + +For more options see the `BaseController`_. For an example of a fully implemented controller see the `MediaController`_. + +.. _BaseController: https://github.com/balloob/pychromecast/blob/master/pychromecast/controllers/__init__.py +.. _MediaController: https://github.com/balloob/pychromecast/blob/master/pychromecast/controllers/media.py + +Exploring existing namespaces +------------------------------- +So you've got PyChromecast running and decided it is time to add support to your favorite app. No worries, the following instructions will have you covered in exploring the possibilities. + +The following instructions require the use of the `Google Chrome browser`_ and the `Google Cast plugin`_. + + * In Chrome, go to `chrome://net-internals/#capture` + * Enable the checkbox 'Include the actual bytes sent/received.' + * Open a new tab, browse to your favorite application on the web that has Chromecast support and start casting. + * Go back to the tab that is capturing events and click on stop. + * From the dropdown click on events. This will show you a table with events that happened while you were recording. + * In the filter box enter the text `Tr@n$p0rt`. This should give one SOCKET connection as result: the connection with your Chromecast. + * Go through the results and collect the JSON that is exchanged. 
+ * Now write a controller that is able to mimic this behavior :-) + +.. _Google Chrome Browser: https://www.google.com/chrome/ +.. _Google Cast Plugin: https://chrome.google.com/webstore/detail/google-cast/boadgeojelhgndaghljhdicfkmllpafd + +Ignoring CEC Data +----------------- +The Chromecast typically reports whether it is the active input on the device +to which it is connected. This value is stored inside a cast object in the +following property. + +.. code:: python + + cast.status.is_active_input + +Some Chromecast users have reported CEC incompatibilities with their media +center devices. These incompatibilities may sometimes cause this active input +value to be reported improperly. + +This active input value is typically used to determine if the Chromecast +is idle. PyChromecast is capable of ignoring the active input value when +determining if the Chromecast is idle in the instance that the +Chromecast is returning erroneous values. To ignore this CEC detection +data in PyChromecast, append a `Linux style wildcard`_ formatted string +to the IGNORE\_CEC list in PyChromecast like in the example below. + +.. code:: python + + pychromecast.IGNORE_CEC.append('*') # Ignore CEC on all devices + pychromecast.IGNORE_CEC.append('Living Room') # Ignore CEC on Chromecasts named Living Room + +Maintainers +----------- + +- Jan Borsodi (`@am0s`_) +- Ryan Kraus (`@rmkraus`_) +- Paulus Schoutsen (`@balloob`_, original author) + +Thanks +------ + +I would like to thank `Fred Clift`_ for laying the socket client ground +work. Without him it would not have been possible! + +.. _Linux style wildcard: http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm +.. _@am0s: https://github.com/am0s +.. _@rmkraus: https://github.com/rmkraus +.. _@balloob: https://github.com/balloob +.. 
_Fred Clift: https://github.com/minektur + + diff --git a/deps/PyChromecast-0.7.4.dist-info/INSTALLER b/deps/PyChromecast-0.7.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/PyChromecast-0.7.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/PyChromecast-0.7.4.dist-info/METADATA b/deps/PyChromecast-0.7.4.dist-info/METADATA new file mode 100644 index 00000000..2a8fd8df --- /dev/null +++ b/deps/PyChromecast-0.7.4.dist-info/METADATA @@ -0,0 +1,181 @@ +Metadata-Version: 2.0 +Name: PyChromecast +Version: 0.7.4 +Summary: Python module to talk to Google Chromecast. +Home-page: https://github.com/balloob/pychromecast +Author: Paulus Schoutsen +Author-email: paulus@paulusschoutsen.nl +License: MIT +Platform: any +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Dist: protobuf (==3.0.0b2) +Requires-Dist: requests (>=2.0) +Requires-Dist: six (>=1.10.0) +Requires-Dist: zeroconf (>=0.17.4) + +pychromecast |Build Status| +=========================== + +.. |Build Status| image:: https://travis-ci.org/balloob/pychromecast.svg?branch=master + :target: https://travis-ci.org/balloob/pychromecast + +Library for Python 2 and 3 to communicate with the Google Chromecast. 
It +currently supports: + +- Auto discovering connected Chromecasts on the network +- Start the default media receiver and play any online media +- Control playback of current playing media +- Implement Google Chromecast api v2 +- Communicate with apps via channels +- Easily extendable to add support for unsupported namespaces +- Multi-room setups with Audio cast devices + +*PyChromecast 0.6 introduces some backward incompatible changes due to +the migration from DIAL to socket for retrieving the app status.* + +Dependencies +------------ + +PyChromecast depends on the Python packages requests, protobuf and +zeroconf. Make sure you have these dependencies installed using +``pip install -r requirements.txt`` + +Some users running Python 2.7 have `reported`_ that they had to upgrade +their version of pip using ``pip install --upgrade pip`` before they +were able to install the latest version of the dependencies. + +.. _reported: https://github.com/balloob/pychromecast/issues/47#issuecomment-107822162 + +How to use +---------- + +.. 
code:: python + + >> from __future__ import print_function + >> import time + >> import pychromecast + + >> pychromecast.get_chromecasts_as_dict().keys() + ['Dev', 'Living Room', 'Den', 'Bedroom'] + + >> cast = pychromecast.get_chromecast(friendly_name="Living Room") + >> # Wait for cast device to be ready + >> cast.wait() + >> print(cast.device) + DeviceStatus(friendly_name='Living Room', model_name='Chromecast', manufacturer='Google Inc.', api_version=(1, 0), uuid=UUID('df6944da-f016-4cb8-97d0-3da2ccaa380b'), cast_type='cast') + + >> print(cast.status) + CastStatus(is_active_input=True, is_stand_by=False, volume_level=1.0, volume_muted=False, app_id=u'CC1AD845', display_name=u'Default Media Receiver', namespaces=[u'urn:x-cast:com.google.cast.player.message', u'urn:x-cast:com.google.cast.media'], session_id=u'CCA39713-9A4F-34A6-A8BF-5D97BE7ECA5C', transport_id=u'web-9', status_text='') + + >> mc = cast.media_controller + >> mc.play_media('http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4', 'video/mp4') + >> print(mc.status) + MediaStatus(current_time=42.458322, content_id=u'http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4', content_type=u'video/mp4', duration=596.474195, stream_type=u'BUFFERED', idle_reason=None, media_session_id=1, playback_rate=1, player_state=u'PLAYING', supported_media_commands=15, volume_level=1, volume_muted=False) + + >> mc.pause() + >> time.sleep(5) + >> mc.play() + +Adding support for extra namespaces +----------------------------------- + +Each app that runs on the Chromecast supports namespaces. They specify a +JSON-based mini-protocol. This is used to communicate between the +Chromecast and your phone/browser and now Python. + +Support for extra namespaces is added by using controllers. To add your own namespace to a current chromecast instance you will first have to define your controller. Example of a minimal controller: + +.. 
code:: python + + from pychromecast.controllers import BaseController + + class MyController(BaseController): + def __init__(self): + super(MyController, self).__init__( + "urn:x-cast:my.super.awesome.namespace") + + def receive_message(self, message, data): + print("Wow, I received this message: {}".format(data)) + + return True # indicate you handled this message + + def request_beer(self): + self.send_message({'request': 'beer'}) + +After you have defined your controller you will have to add an instance to a Chromecast object: `cast.register_handler(MyController())`. When a message is received with your namespace it will be routed to your controller. + +For more options see the `BaseController`_. For an example of a fully implemented controller see the `MediaController`_. + +.. _BaseController: https://github.com/balloob/pychromecast/blob/master/pychromecast/controllers/__init__.py +.. _MediaController: https://github.com/balloob/pychromecast/blob/master/pychromecast/controllers/media.py + +Exploring existing namespaces +------------------------------- +So you've got PyChromecast running and decided it is time to add support to your favorite app. No worries, the following instructions will have you covered in exploring the possibilities. + +The following instructions require the use of the `Google Chrome browser`_ and the `Google Cast plugin`_. + + * In Chrome, go to `chrome://net-internals/#capture` + * Enable the checkbox 'Include the actual bytes sent/received.' + * Open a new tab, browse to your favorite application on the web that has Chromecast support and start casting. + * Go back to the tab that is capturing events and click on stop. + * From the dropdown click on events. This will show you a table with events that happened while you were recording. + * In the filter box enter the text `Tr@n$p0rt`. This should give one SOCKET connection as result: the connection with your Chromecast. + * Go through the results and collect the JSON that is exchanged. 
+ * Now write a controller that is able to mimic this behavior :-) + +.. _Google Chrome Browser: https://www.google.com/chrome/ +.. _Google Cast Plugin: https://chrome.google.com/webstore/detail/google-cast/boadgeojelhgndaghljhdicfkmllpafd + +Ignoring CEC Data +----------------- +The Chromecast typically reports whether it is the active input on the device +to which it is connected. This value is stored inside a cast object in the +following property. + +.. code:: python + + cast.status.is_active_input + +Some Chromecast users have reported CEC incompatibilities with their media +center devices. These incompatibilities may sometimes cause this active input +value to be reported improperly. + +This active input value is typically used to determine if the Chromecast +is idle. PyChromecast is capable of ignoring the active input value when +determining if the Chromecast is idle in the instance that the +Chromecast is returning erroneous values. To ignore this CEC detection +data in PyChromecast, append a `Linux style wildcard`_ formatted string +to the IGNORE\_CEC list in PyChromecast like in the example below. + +.. code:: python + + pychromecast.IGNORE_CEC.append('*') # Ignore CEC on all devices + pychromecast.IGNORE_CEC.append('Living Room') # Ignore CEC on Chromecasts named Living Room + +Maintainers +----------- + +- Jan Borsodi (`@am0s`_) +- Ryan Kraus (`@rmkraus`_) +- Paulus Schoutsen (`@balloob`_, original author) + +Thanks +------ + +I would like to thank `Fred Clift`_ for laying the socket client ground +work. Without him it would not have been possible! + +.. _Linux style wildcard: http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm +.. _@am0s: https://github.com/am0s +.. _@rmkraus: https://github.com/rmkraus +.. _@balloob: https://github.com/balloob +.. 
_Fred Clift: https://github.com/minektur + + diff --git a/deps/PyChromecast-0.7.4.dist-info/RECORD b/deps/PyChromecast-0.7.4.dist-info/RECORD new file mode 100644 index 00000000..6d66497e --- /dev/null +++ b/deps/PyChromecast-0.7.4.dist-info/RECORD @@ -0,0 +1,42 @@ +.DS_Store,sha256=z2gRZJvEM_WGkHGXKJ65r7UPG7qICbiawVsPXEnp-8c,6148 +PyChromecast-0.7.4.dist-info/DESCRIPTION.rst,sha256=SSmbtGe3rWS2588qTAdJ0VQvN9YzIT8RPIyhOgyxA6A,7126 +PyChromecast-0.7.4.dist-info/METADATA,sha256=S_HTSuuPz09zMaXxGrFtgHo5C_-eWaS1GDEtJviJhPM,7866 +PyChromecast-0.7.4.dist-info/RECORD,, +PyChromecast-0.7.4.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 +PyChromecast-0.7.4.dist-info/metadata.json,sha256=kJPpgubHP3GQOpcv9gIQcba_QDjnDxnjPh9tOJQHr0o,903 +PyChromecast-0.7.4.dist-info/pbr.json,sha256=WvxHilx5XoxsLZlvp6nyc79cgMObi5TSKj6KUiSdTe4,47 +PyChromecast-0.7.4.dist-info/top_level.txt,sha256=rO8Gp9im127G1t-Kzk0r2Lh2y19ZaIG91OBRZpw0gTA,13 +pychromecast/.DS_Store,sha256=oDO_w-SWfCkAPjvaG6BL1qB2KvyzpPpFv8MbYH9fljg,6148 +pychromecast/__init__.py,sha256=U3Y46fioyGyvkNNDaulVorIspSbFK6rpVPKeANWIPPY,15479 +pychromecast/authority_keys_pb2.py,sha256=b4lL5q50P1OJNn-z2JvkUF3uoc347geHiFqeyV8kq-w,3985 +pychromecast/cast_channel_pb2.py,sha256=JPXrpcOWoeNIOWf9-FZFooAfYLgrxSDKfiIuReCmCpk,16191 +pychromecast/config.py,sha256=28hjJlQ2vzZbqS12VW6a42ry6NNo1khMJ9NDYyd2zm8,1012 +pychromecast/dial.py,sha256=_-rSh3r8_MIWwfKY1Wqs7NEAKKcE0K0Vnhl1VVrO0KU,3877 +pychromecast/discovery.py,sha256=1S5wvFaf-4WrRKfdfe7-stPEP8TLU2L6GeIAmnn36oU,2578 +pychromecast/error.py,sha256=4tQQ-I4c2k0OgJWsn-kAcR1konUPq2DzfB6qtSJ2oUw,1334 +pychromecast/logging_pb2.py,sha256=2vOWKAt7iBjp16sdOwpFvDSlHtzQqq13qoKR7bL6Dik,34008 +pychromecast/socket_client.py,sha256=9uET2UNvjW4ra5RgdniWlJBhat54lqqh_wmpGhfWOHw,33787 +pychromecast/upnp.py,sha256=A8WsgB5ay98e35T4b-UxxItcQVQOka4TICbi8JUv-mg,2732 +pychromecast/websocket.py,sha256=9ydyIh7b7eoIA_yXvk-HRuHowhPd9bmQaoyLYgXn4y4,13417 
+pychromecast/youtube.py,sha256=2iHHo-VnvSjWY2xKjd8lGDqvg3sYpqaExWnx-Puccxc,1054 +pychromecast/controllers/__init__.py,sha256=qUwhRxqUi8bRO-ejv2azlGbm776wh4iiWEIp4lJ-IkQ,3593 +pychromecast/controllers/media.py,sha256=4HNKUTsejuRU2w8AtGLrifuN-mtYRW-HP-t5QIo28cU,16129 +pychromecast/controllers/plex.py,sha256=u1C5WGkoqAlzGdvNF1nCo9riuDW9s_idxhfhY4ZupXQ,703 +pychromecast/controllers/youtube.py,sha256=eTZb0qK4fua9ai3UQFDcJMcIMfLTAVPwWSlGIvUXI6Y,1293 +PyChromecast-0.7.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pychromecast/__pycache__/logging_pb2.cpython-34.pyc,, +pychromecast/__pycache__/authority_keys_pb2.cpython-34.pyc,, +pychromecast/__pycache__/dial.cpython-34.pyc,, +pychromecast/controllers/__pycache__/__init__.cpython-34.pyc,, +pychromecast/__pycache__/upnp.cpython-34.pyc,, +pychromecast/__pycache__/socket_client.cpython-34.pyc,, +pychromecast/__pycache__/error.cpython-34.pyc,, +pychromecast/controllers/__pycache__/youtube.cpython-34.pyc,, +pychromecast/__pycache__/config.cpython-34.pyc,, +pychromecast/__pycache__/cast_channel_pb2.cpython-34.pyc,, +pychromecast/controllers/__pycache__/plex.cpython-34.pyc,, +pychromecast/__pycache__/__init__.cpython-34.pyc,, +pychromecast/__pycache__/youtube.cpython-34.pyc,, +pychromecast/__pycache__/discovery.cpython-34.pyc,, +pychromecast/controllers/__pycache__/media.cpython-34.pyc,, +pychromecast/__pycache__/websocket.cpython-34.pyc,, diff --git a/deps/PyChromecast-0.7.4.dist-info/WHEEL b/deps/PyChromecast-0.7.4.dist-info/WHEEL new file mode 100644 index 00000000..0de529b1 --- /dev/null +++ b/deps/PyChromecast-0.7.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/deps/PyChromecast-0.7.4.dist-info/metadata.json b/deps/PyChromecast-0.7.4.dist-info/metadata.json new file mode 100644 index 00000000..e3f2d0e0 --- /dev/null +++ b/deps/PyChromecast-0.7.4.dist-info/metadata.json @@ -0,0 +1 @@ 
+{"generator": "bdist_wheel (0.26.0)", "summary": "Python module to talk to Google Chromecast.", "classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/balloob/pychromecast"}, "contacts": [{"email": "paulus@paulusschoutsen.nl", "name": "Paulus Schoutsen", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "MIT", "metadata_version": "2.0", "name": "PyChromecast", "platform": "any", "run_requires": [{"requires": ["protobuf (==3.0.0b2)", "requests (>=2.0)", "six (>=1.10.0)", "zeroconf (>=0.17.4)"]}], "extras": [], "version": "0.7.4"} \ No newline at end of file diff --git a/deps/PyChromecast-0.7.4.dist-info/pbr.json b/deps/PyChromecast-0.7.4.dist-info/pbr.json new file mode 100644 index 00000000..49267f98 --- /dev/null +++ b/deps/PyChromecast-0.7.4.dist-info/pbr.json @@ -0,0 +1 @@ +{"git_version": "03f56b3", "is_release": false} \ No newline at end of file diff --git a/deps/PyChromecast-0.7.4.dist-info/top_level.txt b/deps/PyChromecast-0.7.4.dist-info/top_level.txt new file mode 100644 index 00000000..a3edcf38 --- /dev/null +++ b/deps/PyChromecast-0.7.4.dist-info/top_level.txt @@ -0,0 +1 @@ +pychromecast diff --git a/deps/RPi.GPIO-0.6.2.dist-info/DESCRIPTION.rst b/deps/RPi.GPIO-0.6.2.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..a381c0e1 --- /dev/null +++ b/deps/RPi.GPIO-0.6.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,204 @@ +This package provides a class to control the GPIO on a Raspberry Pi. + +Note that this module is unsuitable for real-time or timing critical applications. This is because you +can not predict when Python will be busy garbage collecting. 
It also runs under the Linux kernel which +is not suitable for real time applications - it is multitasking O/S and another process may be given +priority over the CPU, causing jitter in your program. If you are after true real-time performance and +predictability, buy yourself an Arduino http://www.arduino.cc ! + +Note that the current release does not support SPI, I2C, hardware PWM or serial functionality on the RPi yet. +This is planned for the near future - watch this space! One-wire functionality is also planned. + +Although hardware PWM is not available yet, software PWM is available to use on all channels. + +For examples and documentation, visit http://sourceforge.net/p/raspberry-gpio-python/wiki/Home/ + +Change Log +========== + +0.6.2 +----- +- Rewrote Debian packaging mechanism +- RPI_INFO reports Pi 3 +- Changed module layout - moved C components to RPi._GPIO + +0.6.1 +----- +- Update RPI_INFO to detect more board types +- Issue 118 - add_event_detect sometimes gives runtime error with unpriv user +- Issue 120 - setmode() remembers invalid mode + +0.6.0a3 +------- +- Now uses /dev/gpiomem if available to avoid being run as root +- Fix warnings with pull up/down on pins 3/5 +- Correct base address on Pi 2 when devicetree is disabled +- caddr_t error on compile (Issue 109) +- Error on invalid parameters to setup() (issue 93) +- Add timeout parameter to wait_for_edge() (issue 91) + +0.5.11 +------ +- Fix - pins > 26 missing when using BOARD mode +- Add getmode() +- Raise exception when a mix of modes is used +- GPIO.cleanaup() unsets the current pin mode + +0.5.10 +------ +- Issue 95 - support RPi 2 boards +- Introduce RPI_INFO +- Deprecate RPI_REVISION +- Issue 97 - fixed docstring for setup() + +0.5.9 +----- +- Issue 87 - warn about pull up/down on i2c pins +- Issue 86/75 - wait_for_edge() bugfix +- Issue 84 - recognise RPi properly when using a custom kernel +- Issue 90 - cleanup() on a list/tuple of channels + +0.5.8 +----- +- Allow lists/tuples of 
channels in GPIO.setup() +- GPIO.output() now allows lists/tuples of values +- GPIO.wait_for_edge() bug fixes (issue 78) + +0.5.7 +----- +- Issue 67 - speed up repeated calls to GPIO.wait_for_event() +- Added bouncetime keyword to GPIO.wait_for_event() +- Added extra edge/interrupt unit tests +- GPIO.wait_for_event() can now be mixed with GPIO.add_event_detect() +- Improved cleanups of events +- Issue 69 resolved + +0.5.6 +----- +- Issue 68 - support for RPi Model B+ +- Fix gpio_function() + +0.5.5 +----- +- Issue 52 - 'unallocate' a channel +- Issue 35 - use switchbounce with GPIO.event_detected() +- Refactored events code +- Rewrote tests to use unittest mechanism and new test board with loopbacks +- Fixed adding events after a GPIO.cleanup() +- Issue 64 - misleading /dev/mem permissions error +- Issue 59 - name collision with PWM constant and class + +0.5.4 +----- +- Changed release status (from alpha to full release) +- Warn when GPIO.cleanup() used with nothing to clean up (issue 44) +- Avoid collisions in constants (e.g. 
HIGH / RISING / PUD_DOWN) +- Accept BOARD numbers in gpio_function (issue 34) +- More return values for gpio_function (INPUT, OUTPUT, SPI, I2C, PWM, SERIAL, UNKNOWN) +- Tidy up docstrings +- Fix /dev/mem access error with gpio_function + +0.5.3a +------ +- Allow pydoc for non-root users (issue 27) +- Fix add_event_detect error when run as daemon (issue 32) +- Simplified exception types +- Changed from distribute to pip + +0.5.2a +------ +- Added software PWM (experimental) +- Added switch bounce handling to event callbacks +- Added channel number parameter to event callbacks (issue 31) +- Internal refactoring and code tidy up + +0.5.1a +------ +- Fixed callbacks for multiple GPIOs (issue 28) + +0.5.0a +------ +- Added new edge detection events (interrupt handling) + - Added add_event_detect() + - Added remove_event_detect() + - Added add_event_callback() + - Added wait_for_edge() +- Removed old experimental event functions + - Removed set_rising_event() + - Removed set_falling_event() + - Removed set_high_event() + - Removed set_low_event() +- Changed event_detected() for new edge detection functionality +- input() now returns 0/LOW == False or 1/HIGH == True (integers) instead of False or True (booleans). 
+- Fix error on repeated import (issue 3) +- Change SetupException to a RuntimeError so it can be caught on import (issue 25, Chris Hager ) +- Improved docstrings of functions + +0.4.2a +------ +- Fix for installing on Arch Linux (Python 3.3) (issue 20) +- Initial value when setting a channel as an output (issue 19) + +0.4.1a +------ +- Added VERSION +- Permit input() of channels set as outputs (Eric Ptak ) + +0.4.0a +------ +- Added support for Revision 2 boards +- Added RPI_REVISION +- Added cleanup() function and removed automatic reset functionality on program exit +- Added get_function() to read existing GPIO channel functionality (suggestion from Eric Ptak ) +- Added set_rising_event() +- Added set_falling_event() +- Added set_high_event() +- Added set_low_event() +- Added event_detected() +- Added test/test.py +- Converted debian to armhf +- Fixed C function short_wait() (thanks to Thibault Porteboeuf ) + +0.3.1a +------ +- Fixed critical bug with swapped high/low state on outputs +- Added pull-up / pull-down setup functionality for inputs + +0.3.0a +------ +- Rewritten as a C extension +- Now uses /dev/mem and SoC registers instead of /sys/class/gpio +- Faster! +- Make call to GPIO.setmode() mandatory +- Added GPIO.HIGH and GPIO.LOW constants + +0.2.0 +----- +- Changed status from alpha to beta +- Added setmode() to be able to use BCM GPIO 00.nn channel numbers +- Renamed InvalidPinException to InvalidChannelException + +0.1.0 +------ +- Fixed direction bug +- Added MANIFEST.in (to include missing file) +- Changed GPIO channel number to pin number +- Tested and working! + +0.0.3a +------ +- Added GPIO table +- Refactored +- Fixed a few critical bugs +- Still completely untested! + +0.0.2a +------ +- Internal refactoring. Still completely untested! + +0.0.1a +------ +- First version. Completely untested until I can get hold of a Raspberry Pi! 
+ + + diff --git a/deps/RPi.GPIO-0.6.2.dist-info/INSTALLER b/deps/RPi.GPIO-0.6.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/RPi.GPIO-0.6.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/RPi.GPIO-0.6.2.dist-info/METADATA b/deps/RPi.GPIO-0.6.2.dist-info/METADATA new file mode 100644 index 00000000..c509b942 --- /dev/null +++ b/deps/RPi.GPIO-0.6.2.dist-info/METADATA @@ -0,0 +1,224 @@ +Metadata-Version: 2.0 +Name: RPi.GPIO +Version: 0.6.2 +Summary: A module to control Raspberry Pi GPIO channels +Home-page: http://sourceforge.net/projects/raspberry-gpio-python/ +Author: Ben Croston +Author-email: ben@croston.org +License: MIT +Keywords: Raspberry Pi GPIO +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Operating System :: POSIX :: Linux +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Software Development +Classifier: Topic :: Home Automation +Classifier: Topic :: System :: Hardware + +This package provides a class to control the GPIO on a Raspberry Pi. + +Note that this module is unsuitable for real-time or timing critical applications. This is because you +can not predict when Python will be busy garbage collecting. It also runs under the Linux kernel which +is not suitable for real time applications - it is multitasking O/S and another process may be given +priority over the CPU, causing jitter in your program. If you are after true real-time performance and +predictability, buy yourself an Arduino http://www.arduino.cc ! + +Note that the current release does not support SPI, I2C, hardware PWM or serial functionality on the RPi yet. +This is planned for the near future - watch this space! One-wire functionality is also planned. 
+ +Although hardware PWM is not available yet, software PWM is available to use on all channels. + +For examples and documentation, visit http://sourceforge.net/p/raspberry-gpio-python/wiki/Home/ + +Change Log +========== + +0.6.2 +----- +- Rewrote Debian packaging mechanism +- RPI_INFO reports Pi 3 +- Changed module layout - moved C components to RPi._GPIO + +0.6.1 +----- +- Update RPI_INFO to detect more board types +- Issue 118 - add_event_detect sometimes gives runtime error with unpriv user +- Issue 120 - setmode() remembers invalid mode + +0.6.0a3 +------- +- Now uses /dev/gpiomem if available to avoid being run as root +- Fix warnings with pull up/down on pins 3/5 +- Correct base address on Pi 2 when devicetree is disabled +- caddr_t error on compile (Issue 109) +- Error on invalid parameters to setup() (issue 93) +- Add timeout parameter to wait_for_edge() (issue 91) + +0.5.11 +------ +- Fix - pins > 26 missing when using BOARD mode +- Add getmode() +- Raise exception when a mix of modes is used +- GPIO.cleanaup() unsets the current pin mode + +0.5.10 +------ +- Issue 95 - support RPi 2 boards +- Introduce RPI_INFO +- Deprecate RPI_REVISION +- Issue 97 - fixed docstring for setup() + +0.5.9 +----- +- Issue 87 - warn about pull up/down on i2c pins +- Issue 86/75 - wait_for_edge() bugfix +- Issue 84 - recognise RPi properly when using a custom kernel +- Issue 90 - cleanup() on a list/tuple of channels + +0.5.8 +----- +- Allow lists/tuples of channels in GPIO.setup() +- GPIO.output() now allows lists/tuples of values +- GPIO.wait_for_edge() bug fixes (issue 78) + +0.5.7 +----- +- Issue 67 - speed up repeated calls to GPIO.wait_for_event() +- Added bouncetime keyword to GPIO.wait_for_event() +- Added extra edge/interrupt unit tests +- GPIO.wait_for_event() can now be mixed with GPIO.add_event_detect() +- Improved cleanups of events +- Issue 69 resolved + +0.5.6 +----- +- Issue 68 - support for RPi Model B+ +- Fix gpio_function() + +0.5.5 +----- +- Issue 52 - 
'unallocate' a channel +- Issue 35 - use switchbounce with GPIO.event_detected() +- Refactored events code +- Rewrote tests to use unittest mechanism and new test board with loopbacks +- Fixed adding events after a GPIO.cleanup() +- Issue 64 - misleading /dev/mem permissions error +- Issue 59 - name collision with PWM constant and class + +0.5.4 +----- +- Changed release status (from alpha to full release) +- Warn when GPIO.cleanup() used with nothing to clean up (issue 44) +- Avoid collisions in constants (e.g. HIGH / RISING / PUD_DOWN) +- Accept BOARD numbers in gpio_function (issue 34) +- More return values for gpio_function (INPUT, OUTPUT, SPI, I2C, PWM, SERIAL, UNKNOWN) +- Tidy up docstrings +- Fix /dev/mem access error with gpio_function + +0.5.3a +------ +- Allow pydoc for non-root users (issue 27) +- Fix add_event_detect error when run as daemon (issue 32) +- Simplified exception types +- Changed from distribute to pip + +0.5.2a +------ +- Added software PWM (experimental) +- Added switch bounce handling to event callbacks +- Added channel number parameter to event callbacks (issue 31) +- Internal refactoring and code tidy up + +0.5.1a +------ +- Fixed callbacks for multiple GPIOs (issue 28) + +0.5.0a +------ +- Added new edge detection events (interrupt handling) + - Added add_event_detect() + - Added remove_event_detect() + - Added add_event_callback() + - Added wait_for_edge() +- Removed old experimental event functions + - Removed set_rising_event() + - Removed set_falling_event() + - Removed set_high_event() + - Removed set_low_event() +- Changed event_detected() for new edge detection functionality +- input() now returns 0/LOW == False or 1/HIGH == True (integers) instead of False or True (booleans). 
+- Fix error on repeated import (issue 3) +- Change SetupException to a RuntimeError so it can be caught on import (issue 25, Chris Hager ) +- Improved docstrings of functions + +0.4.2a +------ +- Fix for installing on Arch Linux (Python 3.3) (issue 20) +- Initial value when setting a channel as an output (issue 19) + +0.4.1a +------ +- Added VERSION +- Permit input() of channels set as outputs (Eric Ptak ) + +0.4.0a +------ +- Added support for Revision 2 boards +- Added RPI_REVISION +- Added cleanup() function and removed automatic reset functionality on program exit +- Added get_function() to read existing GPIO channel functionality (suggestion from Eric Ptak ) +- Added set_rising_event() +- Added set_falling_event() +- Added set_high_event() +- Added set_low_event() +- Added event_detected() +- Added test/test.py +- Converted debian to armhf +- Fixed C function short_wait() (thanks to Thibault Porteboeuf ) + +0.3.1a +------ +- Fixed critical bug with swapped high/low state on outputs +- Added pull-up / pull-down setup functionality for inputs + +0.3.0a +------ +- Rewritten as a C extension +- Now uses /dev/mem and SoC registers instead of /sys/class/gpio +- Faster! +- Make call to GPIO.setmode() mandatory +- Added GPIO.HIGH and GPIO.LOW constants + +0.2.0 +----- +- Changed status from alpha to beta +- Added setmode() to be able to use BCM GPIO 00.nn channel numbers +- Renamed InvalidPinException to InvalidChannelException + +0.1.0 +------ +- Fixed direction bug +- Added MANIFEST.in (to include missing file) +- Changed GPIO channel number to pin number +- Tested and working! + +0.0.3a +------ +- Added GPIO table +- Refactored +- Fixed a few critical bugs +- Still completely untested! + +0.0.2a +------ +- Internal refactoring. Still completely untested! + +0.0.1a +------ +- First version. Completely untested until I can get hold of a Raspberry Pi! 
+ + + diff --git a/deps/RPi.GPIO-0.6.2.dist-info/RECORD b/deps/RPi.GPIO-0.6.2.dist-info/RECORD new file mode 100644 index 00000000..a925aa13 --- /dev/null +++ b/deps/RPi.GPIO-0.6.2.dist-info/RECORD @@ -0,0 +1,12 @@ +RPi/_GPIO.cpython-34m.so,sha256=vAgqvC2ElrF2cSyFgxJ_SvDLSJ6m5h6R77GxoBQ9nDg,132144 +RPi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +RPi/GPIO/__init__.py,sha256=jnxwfrIfmsuc4CHYCyD7fhmXuK910T8nJZly3TFbZ1s,1112 +RPi.GPIO-0.6.2.dist-info/DESCRIPTION.rst,sha256=XQZ6vEQCpIDCQyetosBFH2erMMSGGq-K6mG1Lw99Yrw,6289 +RPi.GPIO-0.6.2.dist-info/METADATA,sha256=_m7PpAOt0icvoUp0OCBMJRRu-HCWgyoWnSg-4Bnn9Jg,6988 +RPi.GPIO-0.6.2.dist-info/RECORD,, +RPi.GPIO-0.6.2.dist-info/WHEEL,sha256=qlXKyUxaKdKGVZKHdQaMFOPNWg5qC8OVyb-aicJwJ6U,104 +RPi.GPIO-0.6.2.dist-info/metadata.json,sha256=tQq4Ev0lJJDCwtSI_mlRDqSWLcCT9vEVAO9VKv7fg8Y,834 +RPi.GPIO-0.6.2.dist-info/top_level.txt,sha256=D2ebmx5QNuKCb-J2LbVMXkkhIVNpXvmxlOVIZjPsALw,4 +RPi.GPIO-0.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +RPi/GPIO/__pycache__/__init__.cpython-34.pyc,, +RPi/__pycache__/__init__.cpython-34.pyc,, diff --git a/deps/RPi.GPIO-0.6.2.dist-info/WHEEL b/deps/RPi.GPIO-0.6.2.dist-info/WHEEL new file mode 100644 index 00000000..982a5e31 --- /dev/null +++ b/deps/RPi.GPIO-0.6.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp34-cp34m-linux_armv7l + diff --git a/deps/RPi.GPIO-0.6.2.dist-info/metadata.json b/deps/RPi.GPIO-0.6.2.dist-info/metadata.json new file mode 100644 index 00000000..dd89cac4 --- /dev/null +++ b/deps/RPi.GPIO-0.6.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Operating System :: POSIX :: Linux", "License :: OSI Approved :: MIT License", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Topic :: Software Development", "Topic :: Home Automation", "Topic :: 
System :: Hardware"], "extensions": {"python.details": {"contacts": [{"email": "ben@croston.org", "name": "Ben Croston", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://sourceforge.net/projects/raspberry-gpio-python/"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["Raspberry", "Pi", "GPIO"], "license": "MIT", "metadata_version": "2.0", "name": "RPi.GPIO", "summary": "A module to control Raspberry Pi GPIO channels", "version": "0.6.2"} \ No newline at end of file diff --git a/deps/RPi.GPIO-0.6.2.dist-info/top_level.txt b/deps/RPi.GPIO-0.6.2.dist-info/top_level.txt new file mode 100644 index 00000000..196e1e3d --- /dev/null +++ b/deps/RPi.GPIO-0.6.2.dist-info/top_level.txt @@ -0,0 +1 @@ +RPi diff --git a/deps/RPi/GPIO/__init__.py b/deps/RPi/GPIO/__init__.py new file mode 100644 index 00000000..6b7adad3 --- /dev/null +++ b/deps/RPi/GPIO/__init__.py @@ -0,0 +1,25 @@ +""" +Copyright (c) 2012-2016 Ben Croston + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +""" + +from RPi._GPIO import * + +VERSION = '0.6.2' diff --git a/deps/RPi/GPIO/__pycache__/__init__.cpython-34.pyc b/deps/RPi/GPIO/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..cecbb148 Binary files /dev/null and b/deps/RPi/GPIO/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/RPi/_GPIO.cpython-34m.so b/deps/RPi/_GPIO.cpython-34m.so new file mode 100755 index 00000000..de8a3e18 Binary files /dev/null and b/deps/RPi/_GPIO.cpython-34m.so differ diff --git a/deps/RPi/__init__.py b/deps/RPi/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/deps/RPi/__pycache__/__init__.cpython-34.pyc b/deps/RPi/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..dc654f72 Binary files /dev/null and b/deps/RPi/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/SQLAlchemy-1.0.15.dist-info/DESCRIPTION.rst b/deps/SQLAlchemy-1.0.15.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..479eaf5c --- /dev/null +++ b/deps/SQLAlchemy-1.0.15.dist-info/DESCRIPTION.rst @@ -0,0 +1,137 @@ +SQLAlchemy +========== + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. 
These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. 
+ SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. 
+ +Documentation +------------- + +Latest documentation is at: + +http://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. + + + diff --git a/deps/SQLAlchemy-1.0.15.dist-info/INSTALLER b/deps/SQLAlchemy-1.0.15.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/SQLAlchemy-1.0.15.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/SQLAlchemy-1.0.15.dist-info/METADATA b/deps/SQLAlchemy-1.0.15.dist-info/METADATA new file mode 100644 index 00000000..cdb9e8ff --- /dev/null +++ b/deps/SQLAlchemy-1.0.15.dist-info/METADATA @@ -0,0 +1,157 @@ +Metadata-Version: 2.0 +Name: SQLAlchemy +Version: 1.0.15 +Summary: Database Abstraction Library +Home-page: http://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: Jython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Classifier: Operating System :: OS Independent + +SQLAlchemy +========== + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. 
SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. 
Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. 
SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +http://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. + + + diff --git a/deps/SQLAlchemy-1.0.15.dist-info/RECORD b/deps/SQLAlchemy-1.0.15.dist-info/RECORD new file mode 100644 index 00000000..3c0ed423 --- /dev/null +++ b/deps/SQLAlchemy-1.0.15.dist-info/RECORD @@ -0,0 +1,376 @@ +SQLAlchemy-1.0.15.dist-info/DESCRIPTION.rst,sha256=ZN8fj2owI_rw0Emr3_RXqoNfTFkThjiZy7xcCzg1W_g,5013 +SQLAlchemy-1.0.15.dist-info/METADATA,sha256=PXrsCp9uV7PWUcEVkGZfGxqVJ9hPqrmf4bF01OegS74,5786 +SQLAlchemy-1.0.15.dist-info/RECORD,, +SQLAlchemy-1.0.15.dist-info/WHEEL,sha256=qlXKyUxaKdKGVZKHdQaMFOPNWg5qC8OVyb-aicJwJ6U,104 +SQLAlchemy-1.0.15.dist-info/metadata.json,sha256=6ZjUYl1zEtn3LvCDChujq-h7i3ttPnXQ-Qa0ZTVQ3Ps,965 +SQLAlchemy-1.0.15.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11 +sqlalchemy/__init__.py,sha256=TXDtb6mvt-pw89t8oryfyzN1O_-tLB008zsHbspt6A0,2129 +sqlalchemy/cprocessors.cpython-34m.so,sha256=il80Q4miLp0Jrhy98MHAXTV8TATdlZVuY9GuoKo_fzQ,34448 +sqlalchemy/cresultproxy.cpython-34m.so,sha256=WtHt8ezIDtS12UIvsrMDudamGPk4AMdc10HXsXLcItU,42664 +sqlalchemy/cutils.cpython-34m.so,sha256=UAduuJiZ7iABA3VMMcdmp-FqqgIm-XmbjgZhtITrrzI,19276 +sqlalchemy/events.py,sha256=4gPVD8hBxWEUrzMpGiNcmDrpCMRaSqvps04offK-fVo,43942 
+sqlalchemy/exc.py,sha256=NhA5R5nDdducWkp0MXtlQ0-Q6iF_rhqkHWblIfuSYGk,11706 +sqlalchemy/inspection.py,sha256=zMa-2nt-OQ0Op1dqq0Z2XCnpdAFSTkqif5Kdi8Wz8AU,3093 +sqlalchemy/interfaces.py,sha256=XSx5y-HittAzc79lU4C7rPbTtSW_Hc2c89NqCy50tsQ,10967 +sqlalchemy/log.py,sha256=opX7UORq5N6_jWxN9aHX9OpiirwAcRA0qq-u5m4SMkQ,6712 +sqlalchemy/pool.py,sha256=-F51TIJYl0XGTV2_sdpV8C1m0jTTQaq0nAezdmSgr84,47220 +sqlalchemy/processors.py,sha256=Li1kdC-I0v03JxeOz4V7u4HAevK6LledyCPvaL06mYc,5220 +sqlalchemy/schema.py,sha256=af2xp2pyTsbPLrSIBr4aOjc25bjnqku_uXL9pqEltx8,1200 +sqlalchemy/types.py,sha256=qcoy5xKaurDV4kaXr489GL2sz8FKkWX21Us3ZCqeasg,1650 +sqlalchemy/connectors/__init__.py,sha256=97YbriYu5mcljh7opc1JOScRlf3Tk8ldbn5urBVm4WY,278 +sqlalchemy/connectors/mxodbc.py,sha256=4giS6L3cIe3JBtTAg1pZpxuIJKd7xV1Cq7YUf5Mea-8,5347 +sqlalchemy/connectors/pyodbc.py,sha256=pG2yf3cEDtTr-w_m4to6jF5l8hZk6MJv69K3cg84NfY,6264 +sqlalchemy/connectors/zxJDBC.py,sha256=2KK_sVSgMsdW0ufZqAwgXjd1FsMb4hqbiUQRAkM0RYg,1868 +sqlalchemy/databases/__init__.py,sha256=BaQyAuMjXNpZYV47hCseHrDtPzTfSw-iqUQYxMWJddw,817 +sqlalchemy/dialects/__init__.py,sha256=7SMul8PL3gkbJRUwAwovHLae5qBBApRF-VcRwU-VtdU,1012 +sqlalchemy/dialects/postgres.py,sha256=heNVHys6E91DIBepXT3ls_4_6N8HTTahrZ49W5IR3M0,614 +sqlalchemy/dialects/firebird/__init__.py,sha256=QYmQ0SaGfq3YjDraCV9ALwqVW5A3KDUF0F6air_qp3Q,664 +sqlalchemy/dialects/firebird/base.py,sha256=IT0prWkh1TFSTke-BqGdVMGdof53zmWWk6zbJZ_TuuI,28170 +sqlalchemy/dialects/firebird/fdb.py,sha256=l4s6_8Z0HvqxgqGz0LNcKWP1qUmEc3M2XM718_drN34,4325 +sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=kCsn2ed4u9fyjcyfEI3rXQdKvL05z9wtf5YjW9-NrvI,6299 +sqlalchemy/dialects/mssql/__init__.py,sha256=G12xmirGZgMzfUKZCA8BFfaCmqUDuYca9Fu2VP_eaks,1081 +sqlalchemy/dialects/mssql/adodbapi.py,sha256=dHZgS3pEDX39ixhlDfTtDcjCq6rdjF85VS7rIZ1TfYo,2493 +sqlalchemy/dialects/mssql/base.py,sha256=FQ9DNxTBqnFInTNmffcmZeySpj-_53l_aQFxNWB2ljY,68991 
+sqlalchemy/dialects/mssql/information_schema.py,sha256=pwuTsgOCY5eSBW9w-g-pyJDRfyuZ_rOEXXNYRuAroCE,6418 +sqlalchemy/dialects/mssql/mxodbc.py,sha256=G9LypIeEizgxeShtDu2M7Vwm8NopnzaTmnZMD49mYeg,3856 +sqlalchemy/dialects/mssql/pymssql.py,sha256=fQE2el6WDwm8EeFqNn9qYXyw_oFPFqidA2zd1fXs8G0,3080 +sqlalchemy/dialects/mssql/pyodbc.py,sha256=LAamdDoPAMSTa0I-51PlJ_sVvyM5M4f99XQcz9mMZR8,9653 +sqlalchemy/dialects/mssql/zxjdbc.py,sha256=u4uBgwk0LbI7_I5CIvM3C4bBb0pmrw2_DqRh_ehJTkI,2282 +sqlalchemy/dialects/mysql/__init__.py,sha256=3cQ2juPT8LsZTicPa2J-0rCQjQIQaPgyBzxjV3O_7xs,1171 +sqlalchemy/dialects/mysql/base.py,sha256=HZA1lxTNHYRVxeMeAw8RwJz2lYbRRYY1qJ5J_SujC9Q,123316 +sqlalchemy/dialects/mysql/cymysql.py,sha256=nqsdQA8LBLIc6eilgX6qwkjm7szsUoqMTVYwK9kkfsE,2349 +sqlalchemy/dialects/mysql/gaerdbms.py,sha256=2MxtTsIqlpq_J32HHqDzz-5vu-mC51Lb7PvyGkJa73M,3387 +sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=K7HvQzLUZGm_pIL2ppltfjGIHPsVlr6R-F50dMg2V7g,6106 +sqlalchemy/dialects/mysql/mysqldb.py,sha256=McqROngxAknbLOXoUAG9o9mP9FQBLs-ouD-JqqI2Ses,6564 +sqlalchemy/dialects/mysql/oursql.py,sha256=rmdr-r66iJ2amqFeGvCohvE8WCl_i6R9KcgVG0uXOQs,8124 +sqlalchemy/dialects/mysql/pymysql.py,sha256=e-qehI-sASmAjEa0ajHqjZjlyJYWsb3RPQY4iBR5pz0,1504 +sqlalchemy/dialects/mysql/pyodbc.py,sha256=Ze9IOKw6ANVQj25IlmSGR8aaJhM0pMuRtbzKF7UsZCY,2665 +sqlalchemy/dialects/mysql/zxjdbc.py,sha256=LIhe2mHSRVgi8I7qmiTMVBRSpuWJVnuDtpHTUivIx0M,3942 +sqlalchemy/dialects/oracle/__init__.py,sha256=UhF2ZyPfT3EFAnP8ZjGng6GnWSzmAkjMax0Lucpn0Bg,797 +sqlalchemy/dialects/oracle/base.py,sha256=9EnyRy5oRf528Njj1dHohB8_1Gh3FivcLcSovH8w7Q0,56790 +sqlalchemy/dialects/oracle/cx_oracle.py,sha256=rQPBYvlS0KZIcw4Pg1ARlGKdmOUcR0xsGge0CXVhxfs,38765 +sqlalchemy/dialects/oracle/zxjdbc.py,sha256=nC7XOCY3NdTLrEyIacNTnLDCaeVjWn59q8UYssJL8Wo,8112 +sqlalchemy/dialects/postgresql/__init__.py,sha256=SjCtM5b3EaGyRaTyg_i82sh_qjkLEIVUXW91XDihiCM,1299 
+sqlalchemy/dialects/postgresql/base.py,sha256=SJ_gCtLwlrhR-h02Vi4dn5bNI1eHdZXTZKf1nBwXJ8g,104340 +sqlalchemy/dialects/postgresql/constraints.py,sha256=8UDx_2TNQgqIUSRETZPhgninJigQ6rMfdRNI6vIt3Is,3119 +sqlalchemy/dialects/postgresql/hstore.py,sha256=n8Wsd7Uldk3bbg66tTa0NKjVqjhJUbF1mVeUsM7keXA,11402 +sqlalchemy/dialects/postgresql/json.py,sha256=MTlIGinMDa8iaVbZMOzYnremo0xL4tn2wyGTPwnvX6U,12215 +sqlalchemy/dialects/postgresql/pg8000.py,sha256=x6o3P8Ad0wKsuF9qeyip39BKc5ORJZ4nWxv-8qOdj0E,8375 +sqlalchemy/dialects/postgresql/psycopg2.py,sha256=Z6ubvg7bzVBBiyTebyvf1WGX4MgJlryRHHzyLQp3qEU,27019 +sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=8R3POkJH8z8a2DxwKNmfmQOsxFqsg4tU_OnjGj3OfDA,1651 +sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=raQRfZb8T9-c-jmq1w86Wci5QyiXgf_9_71OInT_sAw,2655 +sqlalchemy/dialects/postgresql/ranges.py,sha256=MihdGXMdmCM6ToIlrj7OJx9Qh_8BX8bv5PSaAepHmII,4814 +sqlalchemy/dialects/postgresql/zxjdbc.py,sha256=AhEGRiAy8q-GM0BStFcsLBgSwjxHkkwy2-BSroIoADo,1397 +sqlalchemy/dialects/sqlite/__init__.py,sha256=0wW0VOhE_RtFDpRcbwvvo3XtD6Y2-SDgG4K7468eh_w,736 +sqlalchemy/dialects/sqlite/base.py,sha256=_L9-854ITf8Fl2BgUymF9fKjDFvXSo7Pb2yuz1CMkDo,55007 +sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=sgXCqn8ZtNIeTDwyo253Kj5mn4TPlIW3AZCNNmURi2A,4129 +sqlalchemy/dialects/sqlite/pysqlite.py,sha256=G-Cg-iI-ErYsVjOH4UlQTEY9pLnLOLV89ik8q0-reuY,14980 +sqlalchemy/dialects/sybase/__init__.py,sha256=gwCgFR_C_hoj0Re7PiaW3zmKSWaLpsd96UVXdM7EnTM,894 +sqlalchemy/dialects/sybase/base.py,sha256=Xpl3vEd5VDyvoIRMg0DZa48Or--yBSrhaZ2CbTSCt0w,28853 +sqlalchemy/dialects/sybase/mxodbc.py,sha256=E_ask6yFSjyhNPvv7gQsvA41WmyxbBvRGWjCyPVr9Gs,901 +sqlalchemy/dialects/sybase/pyodbc.py,sha256=0a_gKwrIweJGcz3ZRYuQZb5BIvwjGmFEYBo9wGk66kI,2102 +sqlalchemy/dialects/sybase/pysybase.py,sha256=tu2V_EbtgxWYOvt-ybo5_lLiBQzsIFaAtF8e7S1_-rk,3208 +sqlalchemy/engine/__init__.py,sha256=orab-ubkvGHzmhExRx2e6zg1hvNOiF1AU-i48xMqcvc,18837 
+sqlalchemy/engine/base.py,sha256=HryAJKI1-jeViL8WVNFSjvBWfT9G20wUFRpcybW373s,79395 +sqlalchemy/engine/default.py,sha256=U_yaliCazUHp6cfk_NVzhB4F_zOJSyy959rHyk40J4M,36548 +sqlalchemy/engine/interfaces.py,sha256=CmPYM_oDp1zAPH13sKmufO4Tuha6KA-fXRQq-K_3YTE,35908 +sqlalchemy/engine/reflection.py,sha256=jk5_oCt6oFbwznVTGXkhgj7L9xUziL1cMsuvTLyafUA,28653 +sqlalchemy/engine/result.py,sha256=Qh8w8mbOPcXPpVoMfwLkBts0gT-_6LASYsCZ6BuvnJA,44360 +sqlalchemy/engine/strategies.py,sha256=mwy-CTrnXzyaIA1TRQBQ_Z2O8wN0lnTNZwDefEWCR9A,8929 +sqlalchemy/engine/threadlocal.py,sha256=y4wOLjtbeY-dvp2GcJDtos6F2jzfP11JVAaSFwZ0zRM,4191 +sqlalchemy/engine/url.py,sha256=ZhS_Iqiu6V1kfIM2pcv3ud9fOPXkFOHBv8wiLOqbJhc,8228 +sqlalchemy/engine/util.py,sha256=Tvb9sIkyd6qOwIA-RsBmo5j877UXa5x-jQmhqnhHWRA,2338 +sqlalchemy/event/__init__.py,sha256=KnUVp-NVX6k276ntGffxgkjVmIWR22FSlzrbAKqQ6S4,419 +sqlalchemy/event/api.py,sha256=O2udbj5D7HdXcvsGBQk6-dK9CAFfePTypWOrUdqmhYY,5990 +sqlalchemy/event/attr.py,sha256=VfRJJl4RD24mQaIoDwArWL2hsGOX6ISSU6vKusVMNO0,12053 +sqlalchemy/event/base.py,sha256=DWDKZV19fFsLavu2cXOxXV8NhO3XuCbKcKamBKyXuME,9540 +sqlalchemy/event/legacy.py,sha256=ACnVeBUt8uwVfh1GNRu22cWCADC3CWZdrsBKzAd6UQQ,5814 +sqlalchemy/event/registry.py,sha256=13wx1qdEmcQeCoAmgf_WQEMuR43h3v7iyd2Re54QdOE,7786 +sqlalchemy/ext/__init__.py,sha256=smCZIGgjJprT4ddhuYSLZ8PrTn4NdXPP3j03a038SdE,322 +sqlalchemy/ext/associationproxy.py,sha256=y61Y4UIZNBit5lqk2WzdHTCXIWRrBg3hHbRVsqXjnqE,33422 +sqlalchemy/ext/automap.py,sha256=JXZ4SgtnU0SuNMxs9iwWpBf1WAd44HDS0u1skn3wWKI,41566 +sqlalchemy/ext/baked.py,sha256=1ny-NbAxFbg82U8RxJLMqZGg-yBP7I5cOIZkO36XsX0,17000 +sqlalchemy/ext/compiler.py,sha256=aSSlySoTsqN-JkACWFIhv3pq2CuZwxKm6pSDfQoc10Q,16257 +sqlalchemy/ext/horizontal_shard.py,sha256=XEBYIfs0YrTt_2vRuaBY6C33ZOZMUHQb2E4X2s3Szns,4814 +sqlalchemy/ext/hybrid.py,sha256=wNXvuYEEmKy-Nc6z7fu1c2gNWCMOiQA0N14Y3FCq5lo,27989 +sqlalchemy/ext/instrumentation.py,sha256=HRgNiuYJ90_uSKC1iDwsEl8_KXscMQkEb9KeElk-yLE,14856 
+sqlalchemy/ext/mutable.py,sha256=QvIb2-tLqgR3Xdq94y8zJqX9cxfIUgJ5K7aDV6HIlww,25440 +sqlalchemy/ext/orderinglist.py,sha256=UCkuZxTWAQ0num-b5oNm8zNJAmVuIFcbFXt5e7JPx-U,13816 +sqlalchemy/ext/serializer.py,sha256=fK3N1miYF16PSIZDjLFS2zI7y-scZ9qtmopXIfzPqrA,5586 +sqlalchemy/ext/declarative/__init__.py,sha256=Jpwf2EukqwNe4RzDfCmX1p-hQ6pPhJEIL_xunaER3tw,756 +sqlalchemy/ext/declarative/api.py,sha256=SHjIAi5ObpEy-NOZhlsWeVscEMmn9qgc187TRi9jR_8,23322 +sqlalchemy/ext/declarative/base.py,sha256=ZVSQ-6ifPKpnSyoD4OjZk_oJUkgMPdRGi-Obbn6C6MM,25290 +sqlalchemy/ext/declarative/clsregistry.py,sha256=jaLLSr-66XvLnA1Z9kxjKatH_XHxWchqEXMKwvjKAXk,10817 +sqlalchemy/orm/__init__.py,sha256=UzDockQEVMaWvr-FE4y1rptrMb5uX5k8v_UNQs82qFY,8033 +sqlalchemy/orm/attributes.py,sha256=OmXkppJEZxRGc0acZZZkSbUhdfDl8ry3Skmvzl3OtLQ,56510 +sqlalchemy/orm/base.py,sha256=nS21na3Yx76UJzhWjzPLud1Ny0Xbmqx2DZQpVpHxHQM,14668 +sqlalchemy/orm/collections.py,sha256=TFutWIn_c07DI48FDOKMsFMnAoQB3BG2FnEMGzEF3iI,53549 +sqlalchemy/orm/dependency.py,sha256=phB8nS1788FSd4dWa2j9d4uj6QFlRL7nzcXvh3Bb7Zo,46192 +sqlalchemy/orm/deprecated_interfaces.py,sha256=A63t6ivbZB3Wq8vWgL8I05uTRR6whcWnIPkquuTIPXU,18254 +sqlalchemy/orm/descriptor_props.py,sha256=uk5r77w1VUWVgn0bkgOItkAlMh9FRgeT6OCgOHz3_bM,25141 +sqlalchemy/orm/dynamic.py,sha256=I_YP7X-H9HLjeFHmYgsOas6JPdqg0Aqe0kaltt4HVzA,13283 +sqlalchemy/orm/evaluator.py,sha256=o9E_mF3gPRa9HF_pNu-5twDe7865eFgO1FSCfoUB71s,4813 +sqlalchemy/orm/events.py,sha256=yRaoXlBL78b3l11itTrAy42UhLu42-7cgXKCFUGNXSg,69410 +sqlalchemy/orm/exc.py,sha256=P5lxi5RMFokiHL136VBK0AP3UmAlJcSDHtzgo-M6Kgs,5439 +sqlalchemy/orm/identity.py,sha256=zsb8xOZaPYKvs4sGhyxW21mILQDrtdSuzD4sTyeKdJs,9021 +sqlalchemy/orm/instrumentation.py,sha256=xtq9soM3mpMws7xqNJIFYXqKw65p2nnxCTfmMpuvpeI,17510 +sqlalchemy/orm/interfaces.py,sha256=AqitvZ_BBkB6L503uhdH55nxHplleJ2kQMwM7xKq9Sc,21552 +sqlalchemy/orm/loading.py,sha256=ZlxQszfG776WPVd5EHzPMdYat5IgmFltQ7QErMU3dtI,22885 
+sqlalchemy/orm/mapper.py,sha256=LKnaVLyDk4AjTdANzDq03R2XyI-SlkSsT50dUaOlsqI,115131 +sqlalchemy/orm/path_registry.py,sha256=8Pah0P8yPVUyRjoET7DvIMGtM5PC8HZJC4GtxAyqVAs,8370 +sqlalchemy/orm/persistence.py,sha256=WzUUNm1UGm5mGxbv94hLTQowEDNoXfU1VoyGnoKeN_g,51028 +sqlalchemy/orm/properties.py,sha256=HR3eoY3Ze3FUPPNCXM_FruWz4pEMWrGlqtCGiK2G1qE,10426 +sqlalchemy/orm/query.py,sha256=frir4h863dRbadKCdys5XeBClZ-SDcvupVgKLaN6Dlo,148267 +sqlalchemy/orm/relationships.py,sha256=79LRGGz8MxsKsAlv0vuZ6MYZXzDXXtfiOCZg-IQ9hiU,116992 +sqlalchemy/orm/scoping.py,sha256=Ao-K4iqg4pBp7Si5JOAlro5zUL_r500TC3lVLcFMLDs,6421 +sqlalchemy/orm/session.py,sha256=yctpvCsLUcFv9Sy8keT1SElZ2VH5DNScYtO7Z77ptYI,111314 +sqlalchemy/orm/state.py,sha256=4LwwftOtPQldH12SKZV2UFgzqPOCj40QfQ08knZs0_E,22984 +sqlalchemy/orm/strategies.py,sha256=wvl6-ZwL28YJsJPY6xuHAYYnjk8k9fAbYqupvfvQ_Xo,58947 +sqlalchemy/orm/strategy_options.py,sha256=LqJWCzML6YCI_toThom_bvfZQAOj6WIb3MrO9K7K6bo,34974 +sqlalchemy/orm/sync.py,sha256=B-d-H1Gzw1TkflpvgJeQghwTzqObzhZCQdvEdSPyDeE,5451 +sqlalchemy/orm/unitofwork.py,sha256=EQvZ7RZ-u5wJT51BWTeMJJi-tt22YRnmqywGUCn0Qrc,23343 +sqlalchemy/orm/util.py,sha256=hMlZbgA8Qji9--EAahwzHsyKeR_GCEKwFZWjR4kgPsk,38077 +sqlalchemy/sql/__init__.py,sha256=IFCJYIilmmAQRnSDhv9Y6LQUSpx6pUU5zp9VT7sOx0c,1737 +sqlalchemy/sql/annotation.py,sha256=8ncgAVUo5QCoinApKjREi8esWNMFklcBqie8Q42KsaQ,6136 +sqlalchemy/sql/base.py,sha256=TuXOp7z0Q30qKAjhgcsts6WGvRbvg6F7OBojMQAxjX0,20990 +sqlalchemy/sql/compiler.py,sha256=4szeiIUoO6kgj37d8skkDVdPJw5ZxYW6KmyTDDmnK3U,100569 +sqlalchemy/sql/crud.py,sha256=b-o2vT2CV2hIxdky9NpzvgEMHjbKxvF4tMgGdU4mLvs,19837 +sqlalchemy/sql/ddl.py,sha256=nkjd_B4lKwC2GeyPjE0ZtRB9RKXccQL1g1XoZ4p69sM,37540 +sqlalchemy/sql/default_comparator.py,sha256=NqvS_PgULqSaeJR25IjB2mUMy0R2O8avX0i9ge-gDi4,10464 +sqlalchemy/sql/dml.py,sha256=7846H52IMJfMYi5Jd-Cv6Hy9hZM4dkonXbjfBjl5ED4,33330 +sqlalchemy/sql/elements.py,sha256=okDQjYYhPucEX5OmP6XFxWUauFLrpV3ucwIisTSVVGE,133812 
+sqlalchemy/sql/expression.py,sha256=vFZ9MmBlC9Fg8IYzLMAwXgcsnXZhkZbUstY6dO8BFGY,5833 +sqlalchemy/sql/functions.py,sha256=CV-L1qZDfNx378--oh_g6I7BQomMfDrjOmwNT6JxkAA,18669 +sqlalchemy/sql/naming.py,sha256=foE2lAzngLCFXCeHrpv0S4zT23GCnZLCiata2MPo0kE,4662 +sqlalchemy/sql/operators.py,sha256=UeZgb7eRhWd4H7OfJZkx0ZWOjvo5chIUXQsBAIeeTDY,23013 +sqlalchemy/sql/schema.py,sha256=df0R2rXhjwCkKGHDshHy_mdrYJvWQ27OjAqhvk-xy9E,147681 +sqlalchemy/sql/selectable.py,sha256=BT5RdjMrV026gLX1ThF_q1NFtEr2Op78yS-OUi3SYsc,119573 +sqlalchemy/sql/sqltypes.py,sha256=JGxizqIjO1WFuZpppWj1Yi5cvCyBczb1JqUQeuhQn8s,54879 +sqlalchemy/sql/type_api.py,sha256=LDXlmstH0rIVosJy2KioqRq2eO25EHV9NJeLlKuZD88,42846 +sqlalchemy/sql/util.py,sha256=GhTktynNUK9LROR9YYSO0idy6mu6riDUkm-gt8bkfYI,20629 +sqlalchemy/sql/visitors.py,sha256=4ipGvAkqFaSAWgyNuKjx5x_ms8GIy9aq-wC5pj4-Z3g,10271 +sqlalchemy/testing/__init__.py,sha256=MwKimX0atzs_SmG2j74GXLiyI8O56e3DLq96tcoL0TM,1095 +sqlalchemy/testing/assertions.py,sha256=r1I2nHC599VZcY-5g0JYRQl8bl9kjkf6WFOooOmJ2eE,16112 +sqlalchemy/testing/assertsql.py,sha256=-fP9Iuhdu52BJoT1lEj_KED8jy5ay_XiJu7i4Ry9eWA,12335 +sqlalchemy/testing/config.py,sha256=nqvVm55Vk0BVNjk1Wj3aYR65j_EEEepfB-W9QSFLU-k,2469 +sqlalchemy/testing/distutils_run.py,sha256=tkURrZRwgFiSwseKm1iJRkSjKf2Rtsb3pOXRWtACTHI,247 +sqlalchemy/testing/engines.py,sha256=u6GlDMXt0FKqVTQe_QJ5JXAnkA6W-xdw6Fe_5gMAQhg,9359 +sqlalchemy/testing/entities.py,sha256=IXqTgAihV-1TZyxL0MWdZzu4rFtxdbWKWFetIJWNGM4,2992 +sqlalchemy/testing/exclusions.py,sha256=WuH_tVK5fZJWe8Hu2LzNB4HNQMa_iAUaGC-_6mHUdIM,12570 +sqlalchemy/testing/fixtures.py,sha256=q4nK-81z2EWs17TjeJtPmnaJUCtDdoUiIU7jgLq3l_w,10721 +sqlalchemy/testing/mock.py,sha256=vj5q-GzJrLW6mMVDLqsppxBu_p7K49VvjfiVt5tn0o8,630 +sqlalchemy/testing/pickleable.py,sha256=8I8M4H1XN29pZPMxZdYkmpKWfwzPsUn6WK5FX4UP9L4,2641 +sqlalchemy/testing/profiling.py,sha256=Q_wOTS5JtcGBcs2eCYIvoRoDS_FW_HcfEW3hXWB87Zg,8392 
+sqlalchemy/testing/provision.py,sha256=DW1OD8lXK4mv0E0ibTPJoq_TwjBSNFPzrk0OktbZXDw,9388 +sqlalchemy/testing/replay_fixture.py,sha256=iAxg7XsFkKSCcJnrNPQNJfjMxOgeBAa-ShOkywWPJ4w,5429 +sqlalchemy/testing/requirements.py,sha256=aIdvbfugMzrlVdldEbpcwretX-zjiukPhPUSZgulrzU,19949 +sqlalchemy/testing/runner.py,sha256=hpNH6MNTif4TnBRySxpm92KgFwDK0mOa8eF7wZXumTI,1607 +sqlalchemy/testing/schema.py,sha256=agOzrIMvmuUCeVZY5mYjJ1eJmOP69-wa0gZALtNtJBk,3446 +sqlalchemy/testing/util.py,sha256=IJ688AWzichtXVwWgYf_A4BUbcXPGsK6BQP5fvY3h-U,7544 +sqlalchemy/testing/warnings.py,sha256=-KskRAh1RkJ_69UIY_WR7i15u21U3gDLQ6nKlnJT7_w,987 +sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sqlalchemy/testing/plugin/bootstrap.py,sha256=Iw8R-d1gqoz_NKFtPyGfdX56QPcQHny_9Lvwov65aVY,1634 +sqlalchemy/testing/plugin/noseplugin.py,sha256=In79x6zs9DOngfoYpaHojihWlSd4PeS7Nwzh3M_KNM4,2847 +sqlalchemy/testing/plugin/plugin_base.py,sha256=bZvCglPhWVY6dyrkWFEjuJ3ccjt3VdRvbjE6oeNil8o,17335 +sqlalchemy/testing/plugin/pytestplugin.py,sha256=o29RaERGgyakyA3ovnGzCq7rPBSjp6Gjf31Yy9jLw-4,6146 +sqlalchemy/testing/suite/__init__.py,sha256=wqCTrb28i5FwhQZOyXVlnz3mA94iQOUBio7lszkFq-g,471 +sqlalchemy/testing/suite/test_ddl.py,sha256=Baw0ou9nKdADmrRuXgWzF1FZx0rvkkw3JHc6yw5BN0M,1838 +sqlalchemy/testing/suite/test_dialect.py,sha256=ORQPXUt53XtO-5ENlWgs8BpsSdPBDjyMRl4W2UjXLI4,1165 +sqlalchemy/testing/suite/test_insert.py,sha256=nP0mgVpsVs72MHMADmihB1oXLbFBpsYsLGO3BlQ7RLU,8132 +sqlalchemy/testing/suite/test_reflection.py,sha256=HtJRsJ_vuNMrOhnPTvuIvRg66OakSaSpeCU36zhaSPg,24616 +sqlalchemy/testing/suite/test_results.py,sha256=oAcO1tD0I7c9ErMeSvSZBZfz1IBDMJHJTf64Y1pBodk,6685 +sqlalchemy/testing/suite/test_select.py,sha256=u0wAz1g-GrAFdZpG4zwSrVckVtjULvjlbd0Z1U1jHAA,5729 +sqlalchemy/testing/suite/test_sequence.py,sha256=fmBR4Pc5tOLSkXFxfcqwGx1z3xaxeJeUyqDnTakKTBU,3831 +sqlalchemy/testing/suite/test_types.py,sha256=UKa-ZPdpz16mVKvT-9ISRAfqdrqiKaE7IA-_phQQuxo,17088 
+sqlalchemy/testing/suite/test_update_delete.py,sha256=r5p467r-EUsjEcWGfUE0VPIfN4LLXZpLRnnyBLyyjl4,1582 +sqlalchemy/util/__init__.py,sha256=G06a5vBxg27RtWzY6dPZHt1FO8qtOiy_2C9PHTTMblI,2520 +sqlalchemy/util/_collections.py,sha256=s41rf2YrYEfl6MTIp3x3WMzLvGP532PR9FD8-2VwivA,27889 +sqlalchemy/util/compat.py,sha256=gInErUyI0XdS590SIFSbjdmIdwn-hxFVyWYU12p_QqM,6873 +sqlalchemy/util/deprecations.py,sha256=D_LTsfb9jHokJtPEWNDRMJOc372xRGNjputAiTIysRU,4403 +sqlalchemy/util/langhelpers.py,sha256=Nhe3Y9ieK6JaFYejjYosVOjOSSIBT2V385Hu6HGcyZk,41607 +sqlalchemy/util/queue.py,sha256=rs3W0LDhKt7M_dlQEjYpI9KS-bzQmmwN38LE_-RRVvU,6548 +sqlalchemy/util/topological.py,sha256=xKsYjjAat4p8cdqRHKwibLzr6WONbPTC0X8Mqg7jYno,2794 +SQLAlchemy-1.0.15.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +sqlalchemy/testing/__pycache__/entities.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/base.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc,, +sqlalchemy/__pycache__/log.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc,, +sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/events.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/mock.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc,, 
+sqlalchemy/sql/__pycache__/crud.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc,, +sqlalchemy/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/util.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/provision.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/requirements.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/path_registry.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-34.pyc,, +sqlalchemy/__pycache__/processors.cpython-34.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc,, 
+sqlalchemy/util/__pycache__/_collections.cpython-34.pyc,, +sqlalchemy/event/__pycache__/api.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/json.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/util.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc,, +sqlalchemy/__pycache__/pool.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/config.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc,, 
+sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/__pycache__/postgres.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc,, +sqlalchemy/__pycache__/types.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/baked.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/functions.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc,, +sqlalchemy/event/__pycache__/base.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/__pycache__/events.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc,, 
+sqlalchemy/ext/__pycache__/associationproxy.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/__pycache__/inspection.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc,, +sqlalchemy/event/__pycache__/attr.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/sync.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-34.pyc,, +sqlalchemy/util/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/runner.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc,, +sqlalchemy/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-34.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc,, 
+sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc,, +sqlalchemy/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-34.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/replay_fixture.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-34.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-34.pyc,, +sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc,, +sqlalchemy/event/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc,, diff --git a/deps/SQLAlchemy-1.0.15.dist-info/WHEEL b/deps/SQLAlchemy-1.0.15.dist-info/WHEEL new file mode 100644 index 00000000..982a5e31 --- /dev/null +++ b/deps/SQLAlchemy-1.0.15.dist-info/WHEEL @@ -0,0 +1,5 @@ 
+Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp34-cp34m-linux_armv7l + diff --git a/deps/SQLAlchemy-1.0.15.dist-info/metadata.json b/deps/SQLAlchemy-1.0.15.dist-info/metadata.json new file mode 100644 index 00000000..e34f8299 --- /dev/null +++ b/deps/SQLAlchemy-1.0.15.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database :: Front-Ends", "Operating System :: OS Independent"], "extensions": {"python.details": {"contacts": [{"email": "mike_mp@zzzcomputing.com", "name": "Mike Bayer", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://www.sqlalchemy.org"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT License", "metadata_version": "2.0", "name": "SQLAlchemy", "summary": "Database Abstraction Library", "test_requires": [{"requires": ["mock", "pytest (>=2.5.2)", "pytest-xdist"]}], "version": "1.0.15"} \ No newline at end of file diff --git a/deps/SQLAlchemy-1.0.15.dist-info/top_level.txt b/deps/SQLAlchemy-1.0.15.dist-info/top_level.txt new file mode 100644 index 00000000..39fb2bef --- /dev/null +++ b/deps/SQLAlchemy-1.0.15.dist-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/deps/Werkzeug-0.11.11.dist-info/DESCRIPTION.rst b/deps/Werkzeug-0.11.11.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..2a6e8bb6 --- /dev/null +++ b/deps/Werkzeug-0.11.11.dist-info/DESCRIPTION.rst @@ -0,0 +1,54 @@ +Werkzeug +======== + +Werkzeug started as simple collection of various utilities for WSGI +applications and has become one of the most advanced WSGI utility +modules. 
It includes a powerful debugger, full featured request and +response objects, HTTP utilities to handle entity tags, cache control +headers, HTTP dates, cookie handling, file uploads, a powerful URL +routing system and a bunch of community contributed addon modules. + +Werkzeug is unicode aware and doesn't enforce a specific template +engine, database adapter or anything else. It doesn't even enforce +a specific way of handling requests and leaves all that up to the +developer. It's most useful for end user applications which should work +on as many server environments as possible (such as blogs, wikis, +bulletin boards, etc.). + +Details and example applications are available on the +`Werkzeug website `_. + + +Features +-------- + +- unicode awareness + +- request and response objects + +- various utility functions for dealing with HTTP headers such as + `Accept` and `Cache-Control` headers. + +- thread local objects with proper cleanup at request end + +- an interactive debugger + +- A simple WSGI server with support for threading and forking + with an automatic reloader. + +- a flexible URL routing system with REST support. + +- fully WSGI compatible + + +Development Version +------------------- + +The Werkzeug development version can be installed by cloning the git +repository from `github`_:: + + git clone git@github.com:mitsuhiko/werkzeug.git + +.. 
_github: http://github.com/mitsuhiko/werkzeug + + diff --git a/deps/Werkzeug-0.11.11.dist-info/INSTALLER b/deps/Werkzeug-0.11.11.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/Werkzeug-0.11.11.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/Werkzeug-0.11.11.dist-info/METADATA b/deps/Werkzeug-0.11.11.dist-info/METADATA new file mode 100644 index 00000000..be3420f3 --- /dev/null +++ b/deps/Werkzeug-0.11.11.dist-info/METADATA @@ -0,0 +1,79 @@ +Metadata-Version: 2.0 +Name: Werkzeug +Version: 0.11.11 +Summary: The Swiss Army knife of Python web development +Home-page: http://werkzeug.pocoo.org/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules + +Werkzeug +======== + +Werkzeug started as simple collection of various utilities for WSGI +applications and has become one of the most advanced WSGI utility +modules. It includes a powerful debugger, full featured request and +response objects, HTTP utilities to handle entity tags, cache control +headers, HTTP dates, cookie handling, file uploads, a powerful URL +routing system and a bunch of community contributed addon modules. 
+ +Werkzeug is unicode aware and doesn't enforce a specific template +engine, database adapter or anything else. It doesn't even enforce +a specific way of handling requests and leaves all that up to the +developer. It's most useful for end user applications which should work +on as many server environments as possible (such as blogs, wikis, +bulletin boards, etc.). + +Details and example applications are available on the +`Werkzeug website `_. + + +Features +-------- + +- unicode awareness + +- request and response objects + +- various utility functions for dealing with HTTP headers such as + `Accept` and `Cache-Control` headers. + +- thread local objects with proper cleanup at request end + +- an interactive debugger + +- A simple WSGI server with support for threading and forking + with an automatic reloader. + +- a flexible URL routing system with REST support. + +- fully WSGI compatible + + +Development Version +------------------- + +The Werkzeug development version can be installed by cloning the git +repository from `github`_:: + + git clone git@github.com:mitsuhiko/werkzeug.git + +.. 
_github: http://github.com/mitsuhiko/werkzeug + + diff --git a/deps/Werkzeug-0.11.11.dist-info/RECORD b/deps/Werkzeug-0.11.11.dist-info/RECORD new file mode 100644 index 00000000..e45e364f --- /dev/null +++ b/deps/Werkzeug-0.11.11.dist-info/RECORD @@ -0,0 +1,94 @@ +werkzeug/posixemulation.py,sha256=xEF2Bxc-vUCPkiu4IbfWVd3LW7DROYAT-ExW6THqyzw,3519 +werkzeug/security.py,sha256=tuVc22OqoHV5K-TrYJmynCJJa12aUt9BQ3wR_vEPQ34,8971 +werkzeug/__init__.py,sha256=7sfvCWELBhe774-fzDoL1EU8GHsNG7iuxOkF0OnE5cc,6920 +werkzeug/testapp.py,sha256=3HQRW1sHZKXuAjCvFMet4KXtQG3loYTFnvn6LWt-4zI,9396 +werkzeug/http.py,sha256=i9tuTlN8sOy9car-esVsHET6AfNCnObbu1pRBjqExvs,35287 +werkzeug/routing.py,sha256=TqiZD5HkwdLBnKBUjC5PlytzXmpczQC5dz54VfQzMOw,66350 +werkzeug/utils.py,sha256=lkybtv_mq35zV1qhelvEcILTzrMUwZ9yon6E8XwapJE,22972 +werkzeug/exceptions.py,sha256=c-3fKHItsPvC52X_NwBNLcmGXR30h0WP5ynPSwCqPiw,18733 +werkzeug/_reloader.py,sha256=YQykMSQW7AlojJQ7qOlgNaXw5_CNjf9yzxplwzVdL7Q,8336 +werkzeug/formparser.py,sha256=90D5Urp8Ghrzw32kAs090G0nXPYlU73NeAzPlQFMVrY,21296 +werkzeug/_compat.py,sha256=8c4U9o6A_TR9nKCcTbpZNxpqCXcXDVIbFawwKM2s92c,6311 +werkzeug/datastructures.py,sha256=-W9uZXQ-HSAy_lGDh5oO6cPUE8bNtXoNQ6BaFG8B2Vs,87575 +werkzeug/wrappers.py,sha256=lKYevpKD1-quk9Cop7bsFxt1eWJxU3h33HCnOI_YzSU,77011 +werkzeug/test.py,sha256=pQMDJjKdjZVWd_BJfnXExE3NH5Ykr-LG5YT4giptWyw,34127 +werkzeug/urls.py,sha256=fSbI4Gb29_p02Zk21VAZQRN1QdOVY9CNTgpb2rbajNQ,36710 +werkzeug/script.py,sha256=DwaVDcXdaOTffdNvlBdLitxWXjKaRVT32VbhDtljFPY,11365 +werkzeug/useragents.py,sha256=uqpgPcJ5BfcCVh9nPIIl2r3duIrIuENmrbRqbAMmPDk,5418 +werkzeug/local.py,sha256=4Q5gwHQJhfhZFqTR8iQDs2VHohpR1OEsP4YTwn7rt7w,14275 +werkzeug/serving.py,sha256=uRUqXuA-Dw2MRA-d232cK_034-taldoj66fEFrtin7k,27736 +werkzeug/filesystem.py,sha256=hHWeWo_gqLMzTRfYt8-7n2wWcWUNTnDyudQDLOBEICE,2175 +werkzeug/wsgi.py,sha256=S8R3pBGPlBK67s-d6Wa93nhzG27WjfcHs_ZBGIAQCxM,39573 +werkzeug/_internal.py,sha256=IEScSoFtQ8KqFH_2ubdfscNAdQ2RIysyVupI5BR9W2U,13709 
+werkzeug/contrib/fixers.py,sha256=MtN_YmENxoTsGvXGGERmtbQ62LaeFc5I2d1YifXNENA,10183 +werkzeug/contrib/limiter.py,sha256=iS8-ahPZ-JLRnmfIBzxpm7O_s3lPsiDMVWv7llAIDCI,1334 +werkzeug/contrib/__init__.py,sha256=f7PfttZhbrImqpr5Ezre8CXgwvcGUJK7zWNpO34WWrw,623 +werkzeug/contrib/testtools.py,sha256=G9xN-qeihJlhExrIZMCahvQOIDxdL9NiX874jiiHFMs,2453 +werkzeug/contrib/iterio.py,sha256=pTX36rYCKO_9IEoB5sIN5cFSYszI9zdx6YhquWovcPY,10814 +werkzeug/contrib/cache.py,sha256=4W2WCT9Hw6HEU8yME9GuU4Xf8e50r2K84ASMxhLb6tY,27983 +werkzeug/contrib/securecookie.py,sha256=X-Ao_0NRDveW6K1Fhe4U42hHWBW8esCpA3VcBDpzWIk,12206 +werkzeug/contrib/lint.py,sha256=XDKYx0ELn9k18xRn4SiAsCgltCuN4yLjzxnCN8tG_eM,12490 +werkzeug/contrib/profiler.py,sha256=ISwCWvwVyGpDLRBRpLjo_qUWma6GXYBrTAco4PEQSHY,5151 +werkzeug/contrib/wrappers.py,sha256=zcd-1yC-kZQOLnn8Bs2SzKUNn7z2H9f0DpcVYS5Ty8s,10380 +werkzeug/contrib/atom.py,sha256=rvijBrphjMzVObfuCR6ddu6aLwI_SiNiudu64OSTh4Q,15588 +werkzeug/contrib/jsrouting.py,sha256=QTmgeDoKXvNK02KzXgx9lr3cAH6fAzpwF5bBdPNvJPs,8564 +werkzeug/contrib/sessions.py,sha256=uAPcnyxaxEla-bUA13gKc3KK4mwSagdzbCZzyKl3PeE,12577 +werkzeug/debug/console.py,sha256=B7uAu9Rk60siDnGlEt-A_q1ZR4zCtmxx5itg3X-BOxo,5599 +werkzeug/debug/repr.py,sha256=NaoB89aHb0vuvdSWels-GWdeGDZp76uE4uSNZPX1jAM,9354 +werkzeug/debug/__init__.py,sha256=qQT5YnOv9Eov9Jt5eLtP6MOqwpmo-tORJ6HcQmmnvro,17271 +werkzeug/debug/tbtools.py,sha256=-BeFH40ISiF5DFn9RfHMQzCGzmFBovZnREyj-lLzptM,18410 +werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 +werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 +werkzeug/debug/shared/debugger.js,sha256=PEMBoNuD6fUaNou8Km_ZvVmFcIA3z3k3jSEMWLW-cA0,6187 +werkzeug/debug/shared/style.css,sha256=7x1s8olZO1XHalqD4M9MWn9vRqQkA635S9_6zRoe220,6231 +werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 +werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 
+werkzeug/debug/shared/jquery.js,sha256=7LkWEzqTdpEfELxcZZlS6wAx5Ff13zZ83lYO2_ujj7g,95957 +werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 +werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 +Werkzeug-0.11.11.dist-info/DESCRIPTION.rst,sha256=5sTwZ_Sj5aeEN8mlcOdNJ_ng40HiGazGmILLyTMX8o0,1595 +Werkzeug-0.11.11.dist-info/metadata.json,sha256=aFs0-WetLKLo-qWj4IIgXNtkOaDtX5H6YtZKJuHJVBc,1096 +Werkzeug-0.11.11.dist-info/RECORD,, +Werkzeug-0.11.11.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 +Werkzeug-0.11.11.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 +Werkzeug-0.11.11.dist-info/METADATA,sha256=yIzL70I-ZF_FswTj-dMsH_HvVIlmG8iqrq_lMsqp8WU,2600 +Werkzeug-0.11.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +werkzeug/__pycache__/wsgi.cpython-34.pyc,, +werkzeug/__pycache__/wrappers.cpython-34.pyc,, +werkzeug/debug/__pycache__/console.cpython-34.pyc,, +werkzeug/contrib/__pycache__/jsrouting.cpython-34.pyc,, +werkzeug/debug/__pycache__/__init__.cpython-34.pyc,, +werkzeug/contrib/__pycache__/fixers.cpython-34.pyc,, +werkzeug/__pycache__/http.cpython-34.pyc,, +werkzeug/contrib/__pycache__/limiter.cpython-34.pyc,, +werkzeug/contrib/__pycache__/atom.cpython-34.pyc,, +werkzeug/__pycache__/posixemulation.cpython-34.pyc,, +werkzeug/__pycache__/__init__.cpython-34.pyc,, +werkzeug/__pycache__/formparser.cpython-34.pyc,, +werkzeug/__pycache__/useragents.cpython-34.pyc,, +werkzeug/contrib/__pycache__/iterio.cpython-34.pyc,, +werkzeug/__pycache__/serving.cpython-34.pyc,, +werkzeug/__pycache__/security.cpython-34.pyc,, +werkzeug/__pycache__/local.cpython-34.pyc,, +werkzeug/contrib/__pycache__/sessions.cpython-34.pyc,, +werkzeug/__pycache__/utils.cpython-34.pyc,, +werkzeug/contrib/__pycache__/lint.cpython-34.pyc,, +werkzeug/__pycache__/_reloader.cpython-34.pyc,, +werkzeug/__pycache__/datastructures.cpython-34.pyc,, 
+werkzeug/__pycache__/test.cpython-34.pyc,, +werkzeug/debug/__pycache__/repr.cpython-34.pyc,, +werkzeug/contrib/__pycache__/__init__.cpython-34.pyc,, +werkzeug/__pycache__/exceptions.cpython-34.pyc,, +werkzeug/__pycache__/_compat.cpython-34.pyc,, +werkzeug/contrib/__pycache__/testtools.cpython-34.pyc,, +werkzeug/contrib/__pycache__/profiler.cpython-34.pyc,, +werkzeug/contrib/__pycache__/cache.cpython-34.pyc,, +werkzeug/__pycache__/routing.cpython-34.pyc,, +werkzeug/__pycache__/_internal.cpython-34.pyc,, +werkzeug/__pycache__/filesystem.cpython-34.pyc,, +werkzeug/__pycache__/testapp.cpython-34.pyc,, +werkzeug/__pycache__/urls.cpython-34.pyc,, +werkzeug/contrib/__pycache__/securecookie.cpython-34.pyc,, +werkzeug/contrib/__pycache__/wrappers.cpython-34.pyc,, +werkzeug/debug/__pycache__/tbtools.cpython-34.pyc,, +werkzeug/__pycache__/script.cpython-34.pyc,, diff --git a/deps/Werkzeug-0.11.11.dist-info/WHEEL b/deps/Werkzeug-0.11.11.dist-info/WHEEL new file mode 100644 index 00000000..9dff69d8 --- /dev/null +++ b/deps/Werkzeug-0.11.11.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/deps/Werkzeug-0.11.11.dist-info/metadata.json b/deps/Werkzeug-0.11.11.dist-info/metadata.json new file mode 100644 index 00000000..27f2dbe4 --- /dev/null +++ b/deps/Werkzeug-0.11.11.dist-info/metadata.json @@ -0,0 +1 @@ +{"license": "BSD", "name": "Werkzeug", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "summary": "The Swiss Army knife of Python web development", "platform": "any", "version": "0.11.11", "extensions": {"python.details": {"project_urls": {"Home": "http://werkzeug.pocoo.org/"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "armin.ronacher@active-4.com", "name": "Armin Ronacher"}]}}, "classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: 
Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules"]} \ No newline at end of file diff --git a/deps/Werkzeug-0.11.11.dist-info/top_level.txt b/deps/Werkzeug-0.11.11.dist-info/top_level.txt new file mode 100644 index 00000000..6fe8da84 --- /dev/null +++ b/deps/Werkzeug-0.11.11.dist-info/top_level.txt @@ -0,0 +1 @@ +werkzeug diff --git a/deps/__pycache__/astral.cpython-34.pyc b/deps/__pycache__/astral.cpython-34.pyc new file mode 100644 index 00000000..ff9a30ce Binary files /dev/null and b/deps/__pycache__/astral.cpython-34.pyc differ diff --git a/deps/__pycache__/phue.cpython-34.pyc b/deps/__pycache__/phue.cpython-34.pyc new file mode 100644 index 00000000..0384e61d Binary files /dev/null and b/deps/__pycache__/phue.cpython-34.pyc differ diff --git a/deps/__pycache__/six.cpython-34.pyc b/deps/__pycache__/six.cpython-34.pyc new file mode 100644 index 00000000..2d857fae Binary files /dev/null and b/deps/__pycache__/six.cpython-34.pyc differ diff --git a/deps/__pycache__/speedtest_cli.cpython-34.pyc b/deps/__pycache__/speedtest_cli.cpython-34.pyc new file mode 100644 index 00000000..54c035cd Binary files /dev/null and b/deps/__pycache__/speedtest_cli.cpython-34.pyc differ diff --git a/deps/__pycache__/static.cpython-34.pyc b/deps/__pycache__/static.cpython-34.pyc new file mode 100644 index 00000000..44046d9d Binary files /dev/null and b/deps/__pycache__/static.cpython-34.pyc differ diff --git a/deps/__pycache__/zeroconf.cpython-34.pyc b/deps/__pycache__/zeroconf.cpython-34.pyc new file mode 100644 index 
00000000..cf76cc2f Binary files /dev/null and b/deps/__pycache__/zeroconf.cpython-34.pyc differ diff --git a/deps/astral-1.2.dist-info/DESCRIPTION.rst b/deps/astral-1.2.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..9e8e5bf7 --- /dev/null +++ b/deps/astral-1.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,22 @@ +This is 'astral' a Python module which calculates + + * Times for various positions of the sun: dawn, sunrise, solar noon, + sunset, dusk, solar elevation, solar azimuth and rahukaalam. + * The phase of the moon. + +For documentation see the http://pythonhosted.org/astral/ + +GoogleGeocoder +-------------- + +`GoogleGeocoder` uses the mapping services provided by Google + +Access to the `GoogleGeocoder` requires you to agree to be bound by +Google Maps/Google Earth APIs Terms of Service found at +https://developers.google.com/maps/terms which includes but is not limited to +having a Google Account. + +More information on Google's maps service can be found at +https://developers.google.com/maps/documentation/ + + diff --git a/deps/astral-1.2.dist-info/INSTALLER b/deps/astral-1.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/astral-1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/astral-1.2.dist-info/METADATA b/deps/astral-1.2.dist-info/METADATA new file mode 100644 index 00000000..b406caf6 --- /dev/null +++ b/deps/astral-1.2.dist-info/METADATA @@ -0,0 +1,37 @@ +Metadata-Version: 2.0 +Name: astral +Version: 1.2 +Summary: Calculations for the position of the sun and moon. 
+Home-page: https://launchpad.net/astral +Author: Simon Kennedy +Author-email: sffjunkie+code@gmail.com +License: Apache-2.0 +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Requires-Dist: pytz + +This is 'astral' a Python module which calculates + + * Times for various positions of the sun: dawn, sunrise, solar noon, + sunset, dusk, solar elevation, solar azimuth and rahukaalam. + * The phase of the moon. + +For documentation see the http://pythonhosted.org/astral/ + +GoogleGeocoder +-------------- + +`GoogleGeocoder` uses the mapping services provided by Google + +Access to the `GoogleGeocoder` requires you to agree to be bound by +Google Maps/Google Earth APIs Terms of Service found at +https://developers.google.com/maps/terms which includes but is not limited to +having a Google Account. + +More information on Google's maps service can be found at +https://developers.google.com/maps/documentation/ + + diff --git a/deps/astral-1.2.dist-info/RECORD b/deps/astral-1.2.dist-info/RECORD new file mode 100644 index 00000000..ccf5191f --- /dev/null +++ b/deps/astral-1.2.dist-info/RECORD @@ -0,0 +1,9 @@ +astral.py,sha256=2H2NtfjQifCApNMEshNuVREdZkmPT4ViYfRLdJR3MH8,90551 +astral-1.2.dist-info/DESCRIPTION.rst,sha256=LnKJ3t_LHOBmmFcaTJecRsXZZgLgoh1n36C-4OrOkFg,712 +astral-1.2.dist-info/METADATA,sha256=k7XKsuC2xSZ6m469EpMLcbHdXPkQ_B9RX00uowJzUqk,1165 +astral-1.2.dist-info/RECORD,, +astral-1.2.dist-info/WHEEL,sha256=9Z5Xm-eel1bTS7e6ogYiKz0zmPEqDwIypurdHN1hR40,116 +astral-1.2.dist-info/metadata.json,sha256=NRjJlS4IUbKVxDj42EqCtqYmOBjBlMYeFs7ehl7Lwyg,698 +astral-1.2.dist-info/top_level.txt,sha256=XMiS6N151F3-8h-ah-eVWJbJarm3kfI0XlpFa8pET2E,7 +astral-1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/astral.cpython-34.pyc,, diff --git a/deps/astral-1.2.dist-info/WHEEL 
b/deps/astral-1.2.dist-info/WHEEL new file mode 100644 index 00000000..ab4a09e7 --- /dev/null +++ b/deps/astral-1.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/deps/astral-1.2.dist-info/metadata.json b/deps/astral-1.2.dist-info/metadata.json new file mode 100644 index 00000000..f96e9d23 --- /dev/null +++ b/deps/astral-1.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "sffjunkie+code@gmail.com", "name": "Simon Kennedy", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://launchpad.net/astral"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "license": "Apache-2.0", "metadata_version": "2.0", "name": "astral", "run_requires": [{"requires": ["pytz"]}], "summary": "Calculations for the position of the sun and moon.", "test_requires": [{"requires": ["tox"]}], "version": "1.2"} \ No newline at end of file diff --git a/deps/astral-1.2.dist-info/top_level.txt b/deps/astral-1.2.dist-info/top_level.txt new file mode 100644 index 00000000..ed18047d --- /dev/null +++ b/deps/astral-1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +astral diff --git a/deps/astral.py b/deps/astral.py new file mode 100644 index 00000000..64b7a0a5 --- /dev/null +++ b/deps/astral.py @@ -0,0 +1,2375 @@ +# -*- coding: utf-8 -*- + +# Copyright 2009-2016, Simon Kennedy, sffjunkie+code@gmail.com + +"""The :mod:`astral` module provides the means to calculate dawn, sunrise, +solar noon, sunset, dusk and rahukaalam times, plus solar azimuth and +elevation, for specific locations or at a specific latitude/longitude. It can +also calculate the moon phase for a specific date. 
+ +The module provides 2 main classes :class:`Astral` and :class:`Location`. + +:class:`Astral` + Has 2 main responsibilities + + * Calculates the events in the UTC timezone. + * Provides access to location data + +:class:`Location` + Holds information about a location and provides functions to calculate + the event times for the location in the correct time zone. + +For example :: + + >>> from astral import * + >>> a = Astral() + >>> location = a['London'] + >>> print('Information for %s' % location.name) + Information for London + >>> timezone = location.timezone + >>> print('Timezone: %s' % timezone) + Timezone: Europe/London + >>> print('Latitude: %.02f; Longitude: %.02f' % (location.latitude, + ... location.longitude)) + Latitude: 51.60; Longitude: 0.05 + >>> from datetime import date + >>> d = date(2009,4,22) + >>> sun = location.sun(local=True, date=d) + >>> print('Dawn: %s' % str(sun['dawn'])) + Dawn: 2009-04-22 05:12:56+01:00 + +The module currently provides 2 methods of obtaining location information; +:class:`AstralGeocoder` (the default, which uses information from within the +module) and :class:`GoogleGeocoder` (which obtains information from Google's +Map Service.) 
+ +To use the :class:`GoogleGeocoder` pass the class as the `geocoder` parameter +to :meth:`Astral.__init__` or by setting the `geocoder` property to an +instance of :class:`GoogleGeocoder`:: + + >>> from astral import GoogleGeocoder + >>> a = Astral(GoogleGeocoder) + +or :: + + >>> from astral import GoogleGeocoder + >>> a = Astral() + >>> a.geocoder = GoogleGeocoder() +""" + +from __future__ import unicode_literals + +try: + import pytz +except ImportError: + raise ImportError(('The astral module requires the ' + 'pytz module to be available.')) + +import datetime +from time import time +from math import cos, sin, tan, acos, asin, atan2, floor, ceil +from math import radians, degrees, pow +import sys + +try: + from urllib import quote_plus +except ImportError: + from urllib.parse import quote_plus + +try: + from urllib2 import urlopen, URLError +except ImportError: + from urllib.request import urlopen, URLError + +try: + import simplejson as json +except ImportError: + import json + +if sys.version_info[0] >= 3: + ustr = str +else: + ustr = unicode + +__all__ = ['Astral', 'Location', + 'AstralGeocoder', 'GoogleGeocoder', + 'AstralError'] + +__version__ = "1.2" +__author__ = "Simon Kennedy " + +SUN_RISING = 1 +SUN_SETTING = -1 + +# name,region,latitude,longitude,timezone,elevation +_LOCATION_INFO = """Abu Dhabi,UAE,24°28'N,54°22'E,Asia/Dubai,5 +Abu Dhabi,United Arab Emirates,24°28'N,54°22'E,Asia/Dubai,5 +Abuja,Nigeria,09°05'N,07°32'E,Africa/Lagos,342 +Accra,Ghana,05°35'N,00°06'W,Africa/Accra,61 +Addis Ababa,Ethiopia,09°02'N,38°42'E,Africa/Addis_Ababa,2355 +Adelaide,Australia,34°56'S,138°36'E,Australia/Adelaide,50 +Al Jubail,Saudi Arabia,25°24'N,49°39'W,Asia/Riyadh,8 +Algiers,Algeria,36°42'N,03°08'E,Africa/Algiers,224 +Amman,Jordan,31°57'N,35°52'E,Asia/Amman,1100 +Amsterdam,Netherlands,52°23'N,04°54'E,Europe/Amsterdam,2 +Andorra la Vella,Andorra,42°31'N,01°32'E,Europe/Andorra,1023 +Ankara,Turkey,39°57'N,32°54'E,Europe/Istanbul,938 
+Antananarivo,Madagascar,18°55'S,47°31'E,Indian/Antananarivo,1276 +Apia,Samoa,13°50'S,171°50'W,Pacific/Apia,2 +Ashgabat,Turkmenistan,38°00'N,57°50'E,Asia/Ashgabat,219 +Asmara,Eritrea,15°19'N,38°55'E,Africa/Asmara,2325 +Astana,Kazakhstan,51°10'N,71°30'E,Asia/Qyzylorda,347 +Asuncion,Paraguay,25°10'S,57°30'W,America/Asuncion,124 +Athens,Greece,37°58'N,23°46'E,Europe/Athens,338 +Avarua,Cook Islands,21°12'N,159°46'W,Etc/GMT-10,208 +Baghdad,Iraq,33°20'N,44°30'E,Asia/Baghdad,41 +Baku,Azerbaijan,40°29'N,49°56'E,Asia/Baku,30 +Bamako,Mali,12°34'N,07°55'W,Africa/Bamako,350 +Bandar Seri Begawan,Brunei Darussalam,04°52'N,115°00'E,Asia/Brunei,1 +Bangkok,Thailand,13°45'N,100°35'E,Asia/Bangkok,2 +Bangui,Central African Republic,04°23'N,18°35'E,Africa/Bangui,373 +Banjul,Gambia,13°28'N,16°40'W,Africa/Banjul,5 +Basse-Terre,Guadeloupe,16°00'N,61°44'W,America/Guadeloupe,1 +Basseterre,Saint Kitts and Nevis,17°17'N,62°43'W,America/St_Kitts,50 +Beijing,China,39°55'N,116°20'E,Asia/Harbin,59 +Beirut,Lebanon,33°53'N,35°31'E,Asia/Beirut,56 +Belfast,Northern Ireland,54°36'N,5°56'W,Europe/Belfast,9 +Belgrade,Yugoslavia,44°50'N,20°37'E,Europe/Belgrade,90 +Belmopan,Belize,17°18'N,88°30'W,America/Belize,63 +Berlin,Germany,52°30'N,13°25'E,Europe/Berlin,35 +Bern,Switzerland,46°57'N,07°28'E,Europe/Zurich,510 +Bishkek,Kyrgyzstan,42°54'N,74°46'E,Asia/Bishkek,772 +Bissau,Guinea-Bissau,11°45'N,15°45'W,Africa/Bissau,0 +Bloemfontein,South Africa,29°12'S,26°07'E,Africa/Johannesburg,1398 +Bogota,Colombia,04°34'N,74°00'W,America/Bogota,2620 +Brasilia,Brazil,15°47'S,47°55'W,Brazil/East,1087 +Bratislava,Slovakia,48°10'N,17°07'E,Europe/Bratislava,132 +Brazzaville,Congo,04°09'S,15°12'E,Africa/Brazzaville,156 +Bridgetown,Barbados,13°05'N,59°30'W,America/Barbados,1 +Brisbane,Australia,27°30'S,153°01'E,Australia/Brisbane,25 +Brussels,Belgium,50°51'N,04°21'E,Europe/Brussels,62 +Bucharest,Romania,44°27'N,26°10'E,Europe/Bucharest,71 +Bucuresti,Romania,44°27'N,26°10'E,Europe/Bucharest,71 
+Budapest,Hungary,47°29'N,19°05'E,Europe/Budapest,120 +Buenos Aires,Argentina,34°62'S,58°44'W,America/Buenos_Aires,6 +Bujumbura,Burundi,03°16'S,29°18'E,Africa/Bujumbura,782 +Cairo,Egypt,30°01'N,31°14'E,Africa/Cairo,74 +Canberra,Australia,35°15'S,149°08'E,Australia/Canberra,575 +Cape Town,South Africa,33°55'S,18°22'E,Africa/Johannesburg,1700 +Caracas,Venezuela,10°30'N,66°55'W,America/Caracas,885 +Castries,Saint Lucia,14°02'N,60°58'W,America/St_Lucia,125 +Cayenne,French Guiana,05°05'N,52°18'W,America/Cayenne,9 +Charlotte Amalie,United States of Virgin Islands,18°21'N,64°56'W,America/Virgin,0 +Chisinau,Moldova,47°02'N,28°50'E,Europe/Chisinau,122 +Conakry,Guinea,09°29'N,13°49'W,Africa/Conakry,26 +Copenhagen,Denmark,55°41'N,12°34'E,Europe/Copenhagen,5 +Cotonou,Benin,06°23'N,02°42'E,Africa/Porto-Novo,5 +Dakar,Senegal,14°34'N,17°29'W,Africa/Dakar,24 +Damascus,Syrian Arab Republic,33°30'N,36°18'E,Asia/Damascus,609 +Dammam,Saudi Arabia,26°30'N,50°12'E,Asia/Riyadh,1 +Dhaka,Bangladesh,23°43'N,90°26'E,Asia/Dhaka,8 +Dili,East Timor,08°29'S,125°34'E,Asia/Dili,11 +Djibouti,Djibouti,11°08'N,42°20'E,Africa/Djibouti,19 +Dodoma,United Republic of Tanzania,06°08'S,35°45'E,Africa/Dar_es_Salaam,1119 +Doha,Qatar,25°15'N,51°35'E,Asia/Qatar,10 +Douglas,Isle Of Man,54°9'N,4°29'W,Europe/London,35 +Dublin,Ireland,53°21'N,06°15'W,Europe/Dublin,85 +Dushanbe,Tajikistan,38°33'N,68°48'E,Asia/Dushanbe,803 +El Aaiun,Morocco,27°9'N,13°12'W,UTC,64 +Fort-de-France,Martinique,14°36'N,61°02'W,America/Martinique,9 +Freetown,Sierra Leone,08°30'N,13°17'W,Africa/Freetown,26 +Funafuti,Tuvalu,08°31'S,179°13'E,Pacific/Funafuti,2 +Gaborone,Botswana,24°45'S,25°57'E,Africa/Gaborone,1005 +George Town,Cayman Islands,19°20'N,81°24'W,America/Cayman,3 +Georgetown,Guyana,06°50'N,58°12'W,America/Guyana,30 +Gibraltar,Gibraltar,36°9'N,5°21'W,Europe/Gibraltar,3 +Guatemala,Guatemala,14°40'N,90°22'W,America/Guatemala,1500 +Hanoi,Viet Nam,21°05'N,105°55'E,Asia/Saigon,6 +Harare,Zimbabwe,17°43'S,31°02'E,Africa/Harare,1503 
+Havana,Cuba,23°08'N,82°22'W,America/Havana,59 +Helsinki,Finland,60°15'N,25°03'E,Europe/Helsinki,56 +Hobart,Tasmania,42°53'S,147°19'E,Australia/Hobart,4 +Hong Kong,China,22°16'N,114°09'E,Asia/Hong_Kong,8 +Honiara,Solomon Islands,09°27'S,159°57'E,Pacific/Guadalcanal,8 +Islamabad,Pakistan,33°40'N,73°10'E,Asia/Karachi,508 +Jakarta,Indonesia,06°09'S,106°49'E,Asia/Jakarta,6 +Jerusalem,Israel,31°47'N,35°12'E,Asia/Jerusalem,775 +Juba,South Sudan,4°51'N,31°36'E,Africa/Juba,550 +Jubail,Saudi Arabia,27°02'N,49°39'E,Asia/Riyadh,2 +Kabul,Afghanistan,34°28'N,69°11'E,Asia/Kabul,1791 +Kampala,Uganda,00°20'N,32°30'E,Africa/Kampala,1155 +Kathmandu,Nepal,27°45'N,85°20'E,Asia/Kathmandu,1337 +Khartoum,Sudan,15°31'N,32°35'E,Africa/Khartoum,380 +Kiev,Ukraine,50°30'N,30°28'E,Europe/Kiev,153 +Kigali,Rwanda,01°59'S,30°04'E,Africa/Kigali,1497 +Kingston,Jamaica,18°00'N,76°50'W,America/Jamaica,9 +Kingston,Norfolk Island,45°20'S,168°43'E,Pacific/Norfolk,113 +Kingstown,Saint Vincent and the Grenadines,13°10'N,61°10'W,America/St_Vincent,1 +Kinshasa,Democratic Republic of the Congo,04°20'S,15°15'E,Africa/Kinshasa,312 +Koror,Palau,07°20'N,134°28'E,Pacific/Palau,33 +Kuala Lumpur,Malaysia,03°09'N,101°41'E,Asia/Kuala_Lumpur,22 +Kuwait,Kuwait,29°30'N,48°00'E,Asia/Kuwait,55 +La Paz,Bolivia,16°20'S,68°10'W,America/La_Paz,4014 +Libreville,Gabon,00°25'N,09°26'E,Africa/Libreville,15 +Lilongwe,Malawi,14°00'S,33°48'E,Africa/Blantyre,1229 +Lima,Peru,12°00'S,77°00'W,America/Lima,13 +Lisbon,Portugal,38°42'N,09°10'W,Europe/Lisbon,123 +Ljubljana,Slovenia,46°04'N,14°33'E,Europe/Ljubljana,385 +Lome,Togo,06°09'N,01°20'E,Africa/Lome,25 +London,England,51°30'N,00°07'W,Europe/London,24 +Luanda,Angola,08°50'S,13°15'E,Africa/Luanda,6 +Lusaka,Zambia,15°28'S,28°16'E,Africa/Lusaka,1154 +Luxembourg,Luxembourg,49°37'N,06°09'E,Europe/Luxembourg,232 +Macau,Macao,22°12'N,113°33'E,Asia/Macau,6 +Madinah,Saudi Arabia,24°28'N,39°36'E,Asia/Riyadh,631 +Madrid,Spain,40°25'N,03°45'W,Europe/Madrid,582 +Majuro,Marshall 
Islands,7°4'N,171°16'E,Pacific/Majuro,65 +Makkah,Saudi Arabia,21°26'N,39°49'E,Asia/Riyadh,240 +Malabo,Equatorial Guinea,03°45'N,08°50'E,Africa/Malabo,56 +Male,Maldives,04°00'N,73°28'E,Indian/Maldives,2 +Mamoudzou,Mayotte,12°48'S,45°14'E,Indian/Mayotte,420 +Managua,Nicaragua,12°06'N,86°20'W,America/Managua,50 +Manama,Bahrain,26°10'N,50°30'E,Asia/Bahrain,2 +Manila,Philippines,14°40'N,121°03'E,Asia/Manila,21 +Maputo,Mozambique,25°58'S,32°32'E,Africa/Maputo,44 +Maseru,Lesotho,29°18'S,27°30'E,Africa/Maseru,1628 +Masqat,Oman,23°37'N,58°36'E,Asia/Muscat,8 +Mbabane,Swaziland,26°18'S,31°06'E,Africa/Mbabane,1243 +Mecca,Saudi Arabia,21°26'N,39°49'E,Asia/Riyadh,240 +Medina,Saudi Arabia,24°28'N,39°36'E,Asia/Riyadh,631 +Mexico,Mexico,19°20'N,99°10'W,America/Mexico_City,2254 +Minsk,Belarus,53°52'N,27°30'E,Europe/Minsk,231 +Mogadishu,Somalia,02°02'N,45°25'E,Africa/Mogadishu,9 +Monaco,Priciplality Of Monaco,43°43'N,7°25'E,Europe/Monaco,206 +Monrovia,Liberia,06°18'N,10°47'W,Africa/Monrovia,9 +Montevideo,Uruguay,34°50'S,56°11'W,America/Montevideo,32 +Moroni,Comoros,11°40'S,43°16'E,Indian/Comoro,29 +Moscow,Russian Federation,55°45'N,37°35'E,Europe/Moscow,247 +Moskva,Russian Federation,55°45'N,37°35'E,Europe/Moscow,247 +Mumbai,India,18°58'N,72°49'E,Asia/Kolkata,14 +Muscat,Oman,23°37'N,58°32'E,Asia/Muscat,8 +N'Djamena,Chad,12°10'N,14°59'E,Africa/Ndjamena,295 +Nairobi,Kenya,01°17'S,36°48'E,Africa/Nairobi,1624 +Nassau,Bahamas,25°05'N,77°20'W,America/Nassau,7 +Naypyidaw,Myanmar,19°45'N,96°6'E,Asia/Rangoon,104 +New Delhi,India,28°37'N,77°13'E,Asia/Kolkata,233 +Ngerulmud,Palau,7°30'N,134°37'E,Pacific/Palau,3 +Niamey,Niger,13°27'N,02°06'E,Africa/Niamey,223 +Nicosia,Cyprus,35°10'N,33°25'E,Asia/Nicosia,162 +Nouakchott,Mauritania,20°10'S,57°30'E,Africa/Nouakchott,3 +Noumea,New Caledonia,22°17'S,166°30'E,Pacific/Noumea,69 +Nuku'alofa,Tonga,21°10'S,174°00'W,Pacific/Tongatapu,6 +Nuuk,Greenland,64°10'N,51°35'W,America/Godthab,70 +Oranjestad,Aruba,12°32'N,70°02'W,America/Aruba,33 
+Oslo,Norway,59°55'N,10°45'E,Europe/Oslo,170 +Ottawa,Canada,45°27'N,75°42'W,US/Eastern,79 +Ouagadougou,Burkina Faso,12°15'N,01°30'W,Africa/Ouagadougou,316 +P'yongyang,Democratic People's Republic of Korea,39°09'N,125°30'E,Asia/Pyongyang,21 +Pago Pago,American Samoa,14°16'S,170°43'W,Pacific/Pago_Pago,0 +Palikir,Micronesia,06°55'N,158°09'E,Pacific/Ponape,71 +Panama,Panama,09°00'N,79°25'W,America/Panama,2 +Papeete,French Polynesia,17°32'S,149°34'W,Pacific/Tahiti,7 +Paramaribo,Suriname,05°50'N,55°10'W,America/Paramaribo,7 +Paris,France,48°50'N,02°20'E,Europe/Paris,109 +Perth,Australia,31°56'S,115°50'E,Australia/Perth,20 +Phnom Penh,Cambodia,11°33'N,104°55'E,Asia/Phnom_Penh,10 +Podgorica,Montenegro,42°28'N,19°16'E,Europe/Podgorica,53 +Port Louis,Mauritius,20°9'S,57°30'E,Indian/Mauritius,5 +Port Moresby,Papua New Guinea,09°24'S,147°08'E,Pacific/Port_Moresby,44 +Port-Vila,Vanuatu,17°45'S,168°18'E,Pacific/Efate,1 +Port-au-Prince,Haiti,18°40'N,72°20'W,America/Port-au-Prince,34 +Port of Spain,Trinidad and Tobago,10°40'N,61°31'W,America/Port_of_Spain,66 +Porto-Novo,Benin,06°23'N,02°42'E,Africa/Porto-Novo,38 +Prague,Czech Republic,50°05'N,14°22'E,Europe/Prague,365 +Praia,Cape Verde,15°02'N,23°34'W,Atlantic/Cape_Verde,35 +Pretoria,South Africa,25°44'S,28°12'E,Africa/Johannesburg,1322 +Pristina,Albania,42°40'N,21°10'E,Europe/Tirane,576 +Quito,Ecuador,00°15'S,78°35'W,America/Guayaquil,2812 +Rabat,Morocco,34°1'N,6°50'W,Africa/Casablanca,75 +Reykjavik,Iceland,64°10'N,21°57'W,Atlantic/Reykjavik,61 +Riga,Latvia,56°53'N,24°08'E,Europe/Riga,7 +Riyadh,Saudi Arabia,24°41'N,46°42'E,Asia/Riyadh,612 +Road Town,British Virgin Islands,18°27'N,64°37'W,America/Virgin,1 +Rome,Italy,41°54'N,12°29'E,Europe/Rome,95 +Roseau,Dominica,15°20'N,61°24'W,America/Dominica,72 +Saint Helier,Jersey,49°11'N,2°6'W,Etc/GMT,54 +Saint Pierre,Saint Pierre and Miquelon,46°46'N,56°12'W,America/Miquelon,5 +Saipan,Northern Mariana Islands,15°12'N,145°45'E,Pacific/Saipan,200 +Sana,Yemen,15°20'N,44°12'W,Asia/Aden,2199 
+Sana'a,Yemen,15°20'N,44°12'W,Asia/Aden,2199 +San Jose,Costa Rica,09°55'N,84°02'W,America/Costa_Rica,931 +San Juan,Puerto Rico,18°28'N,66°07'W,America/Puerto_Rico,21 +San Marino,San Marino,43°55'N,12°30'E,Europe/San_Marino,749 +San Salvador,El Salvador,13°40'N,89°10'W,America/El_Salvador,621 +Santiago,Chile,33°24'S,70°40'W,America/Santiago,476 +Santo Domingo,Dominica Republic,18°30'N,69°59'W,America/Santo_Domingo,14 +Sao Tome,Sao Tome and Principe,00°10'N,06°39'E,Africa/Sao_Tome,13 +Sarajevo,Bosnia and Herzegovina,43°52'N,18°26'E,Europe/Sarajevo,511 +Seoul,Republic of Korea,37°31'N,126°58'E,Asia/Seoul,49 +Singapore,Republic of Singapore,1°18'N,103°48'E,Asia/Singapore,16 +Skopje,The Former Yugoslav Republic of Macedonia,42°01'N,21°26'E,Europe/Skopje,238 +Sofia,Bulgaria,42°45'N,23°20'E,Europe/Sofia,531 +Sri Jayawardenapura Kotte,Sri Lanka,6°54'N,79°53'E,Asia/Colombo,7 +St. George's,Grenada,32°22'N,64°40'W,America/Grenada,7 +St. John's,Antigua and Barbuda,17°7'N,61°51'W,America/Antigua,1 +St. 
Peter Port,Guernsey,49°26'N,02°33'W,Europe/Guernsey,1 +Stanley,Falkland Islands,51°40'S,59°51'W,Atlantic/Stanley,23 +Stockholm,Sweden,59°20'N,18°05'E,Europe/Stockholm,52 +Sucre,Bolivia,16°20'S,68°10'W,America/La_Paz,2903 +Suva,Fiji,18°06'S,178°30'E,Pacific/Fiji,0 +Sydney,Australia,33°53'S,151°13'E,Australia/Sydney,3 +Taipei,Republic of China (Taiwan),25°02'N,121°38'E,Asia/Taipei,9 +T'bilisi,Georgia,41°43'N,44°50'E,Asia/Tbilisi,467 +Tbilisi,Georgia,41°43'N,44°50'E,Asia/Tbilisi,467 +Tallinn,Estonia,59°22'N,24°48'E,Europe/Tallinn,39 +Tarawa,Kiribati,01°30'N,173°00'E,Pacific/Tarawa,2 +Tashkent,Uzbekistan,41°20'N,69°10'E,Asia/Tashkent,489 +Tegucigalpa,Honduras,14°05'N,87°14'W,America/Tegucigalpa,994 +Tehran,Iran,35°44'N,51°30'E,Asia/Tehran,1191 +Thimphu,Bhutan,27°31'N,89°45'E,Asia/Thimphu,2300 +Tirana,Albania,41°18'N,19°49'E,Europe/Tirane,90 +Tirane,Albania,41°18'N,19°49'E,Europe/Tirane,90 +Torshavn,Faroe Islands,62°05'N,06°56'W,Atlantic/Faroe,39 +Tokyo,Japan,35°41'N,139°41'E,Asia/Tokyo,8 +Tripoli,Libyan Arab Jamahiriya,32°49'N,13°07'E,Africa/Tripoli,81 +Tunis,Tunisia,36°50'N,10°11'E,Africa/Tunis,4 +Ulan Bator,Mongolia,47°55'N,106°55'E,Asia/Ulaanbaatar,1330 +Ulaanbaatar,Mongolia,47°55'N,106°55'E,Asia/Ulaanbaatar,1330 +Vaduz,Liechtenstein,47°08'N,09°31'E,Europe/Vaduz,463 +Valletta,Malta,35°54'N,14°31'E,Europe/Malta,48 +Vienna,Austria,48°12'N,16°22'E,Europe/Vienna,171 +Vientiane,Lao People's Democratic Republic,17°58'N,102°36'E,Asia/Vientiane,171 +Vilnius,Lithuania,54°38'N,25°19'E,Europe/Vilnius,156 +W. 
Indies,Antigua and Barbuda,17°20'N,61°48'W,America/Antigua,0 +Warsaw,Poland,52°13'N,21°00'E,Europe/Warsaw,107 +Washington DC,USA,39°91'N,77°02'W,US/Eastern,23 +Wellington,New Zealand,41°19'S,174°46'E,Pacific/Auckland,7 +Willemstad,Netherlands Antilles,12°05'N,69°00'W,America/Curacao,1 +Windhoek,Namibia,22°35'S,17°04'E,Africa/Windhoek,1725 +Yamoussoukro,Cote d'Ivoire,06°49'N,05°17'W,Africa/Abidjan,213 +Yangon,Myanmar,16°45'N,96°20'E,Asia/Rangoon,33 +Yaounde,Cameroon,03°50'N,11°35'E,Africa/Douala,760 +Yaren,Nauru,0°32'S,166°55'E,Pacific/Nauru,0 +Yerevan,Armenia,40°10'N,44°31'E,Asia/Yerevan,890 +Zagreb,Croatia,45°50'N,15°58'E,Europe/Zagreb,123 + +# UK Cities +Aberdeen,Scotland,57°08'N,02°06'W,Europe/London,65 +Birmingham,England,52°30'N,01°50'W,Europe/London,99 +Bolton,England,53°35'N,02°15'W,Europe/London,105 +Bradford,England,53°47'N,01°45'W,Europe/London,127 +Bristol,England,51°28'N,02°35'W,Europe/London,11 +Cardiff,Wales,51°29'N,03°13'W,Europe/London,9 +Crawley,England,51°8'N,00°10'W,Europe/London,77 +Edinburgh,Scotland,55°57'N,03°13'W,Europe/London,61 +Glasgow,Scotland,55°50'N,04°15'W,Europe/London,8 +Greenwich,England,51°28'N,00°00'W,Europe/London,24 +Leeds,England,53°48'N,01°35'W,Europe/London,47 +Leicester,England,52°38'N,01°08'W,Europe/London,138 +Liverpool,England,53°25'N,03°00'W,Europe/London,25 +Manchester,England,53°30'N,02°15'W,Europe/London,78 +Newcastle Upon Time,England,54°59'N,01°36'W,Europe/London,47 +Newcastle,England,54°59'N,01°36'W,Europe/London,47 +Norwich,England,52°38'N,01°18'E,Europe/London,18 +Oxford,England,51°45'N,01°15'W,Europe/London,72 +Plymouth,England,50°25'N,04°15'W,Europe/London,50 +Portsmouth,England,50°48'N,01°05'W,Europe/London,9 +Reading,England,51°27'N,0°58'W,Europe/London,84 +Sheffield,England,53°23'N,01°28'W,Europe/London,105 +Southampton,England,50°55'N,01°25'W,Europe/London,9 +Swansea,England,51°37'N,03°57'W,Europe/London,91 +Swindon,England,51°34'N,01°47'W,Europe/London,112 
+Wolverhampton,England,52°35'N,2°08'W,Europe/London,89 +Barrow-In-Furness,England,54°06'N,3°13'W,Europe/London,20 + +# US State Capitals +Montgomery,USA,32°21'N,86°16'W,US/Central,42 +Juneau,USA,58°23'N,134°11'W,US/Alaska,29 +Phoenix,USA,33°26'N,112°04'W,America/Phoenix,331 +Little Rock,USA,34°44'N,92°19'W,US/Central,95 +Sacramento,USA,38°33'N,121°28'W,US/Pacific,15 +Denver,USA,39°44'N,104°59'W,US/Mountain,1600 +Hartford,USA,41°45'N,72°41'W,US/Eastern,9 +Dover,USA,39°09'N,75°31'W,US/Eastern,8 +Tallahassee,USA,30°27'N,84°16'W,US/Eastern,59 +Atlanta,USA,33°45'N,84°23'W,US/Eastern,267 +Honolulu,USA,21°18'N,157°49'W,US/Hawaii,229 +Boise,USA,43°36'N,116°12'W,US/Mountain,808 +Springfield,USA,39°47'N,89°39'W,US/Central,190 +Indianapolis,USA,39°46'N,86°9'W,US/Eastern,238 +Des Moines,USA,41°35'N,93°37'W,US/Central,276 +Topeka,USA,39°03'N,95°41'W,US/Central,289 +Frankfort,USA,38°11'N,84°51'W,US/Eastern,243 +Baton Rouge,USA,30°27'N,91°8'W,US/Central,15 +Augusta,USA,44°18'N,69°46'W,US/Eastern,41 +Annapolis,USA,38°58'N,76°30'W,US/Eastern,0 +Boston,USA,42°21'N,71°03'W,US/Eastern,6 +Lansing,USA,42°44'N,84°32'W,US/Eastern,271 +Saint Paul,USA,44°56'N,93°05'W,US/Central,256 +Jackson,USA,32°17'N,90°11'W,US/Central,90 +Jefferson City,USA,38°34'N,92°10'W,US/Central,167 +Helena,USA,46°35'N,112°1'W,US/Mountain,1150 +Lincoln,USA,40°48'N,96°40'W,US/Central,384 +Carson City,USA,39°9'N,119°45'W,US/Pacific,1432 +Concord,USA,43°12'N,71°32'W,US/Eastern,117 +Trenton,USA,40°13'N,74°45'W,US/Eastern,28 +Santa Fe,USA,35°40'N,105°57'W,US/Mountain,2151 +Albany,USA,42°39'N,73°46'W,US/Eastern,17 +Raleigh,USA,35°49'N,78°38'W,US/Eastern,90 +Bismarck,USA,46°48'N,100°46'W,US/Central,541 +Columbus,USA,39°59'N,82°59'W,US/Eastern,271 +Oklahoma City,USA,35°28'N,97°32'W,US/Central,384 +Salem,USA,44°55'N,123°1'W,US/Pacific,70 +Harrisburg,USA,40°16'N,76°52'W,US/Eastern,112 +Providence,USA,41°49'N,71°25'W,US/Eastern,2 +Columbia,USA,34°00'N,81°02'W,US/Eastern,96 +Pierre,USA,44°22'N,100°20'W,US/Central,543 
+Nashville,USA,36°10'N,86°47'W,US/Central,149 +Austin,USA,30°16'N,97°45'W,US/Central,167 +Salt Lake City,USA,40°45'N,111°53'W,US/Mountain,1294 +Montpelier,USA,44°15'N,72°34'W,US/Eastern,325 +Richmond,USA,37°32'N,77°25'W,US/Eastern,68 +Olympia,USA,47°2'N,122°53'W,US/Pacific,35 +Charleston,USA,38°20'N,81°38'W,US/Eastern,11 +Madison,USA,43°4'N,89°24'W,US/Central,281 +Cheyenne,USA,41°8'N,104°48'W,US/Mountain,1860 + +# Major US Cities +Birmingham,USA,33°39'N,86°48'W,US/Central,197 +Anchorage,USA,61°13'N,149°53'W,US/Alaska,30 +Los Angeles,USA,34°03'N,118°15'W,US/Pacific,50 +San Francisco,USA,37°46'N,122°25'W,US/Pacific,47 +Bridgeport,USA,41°11'N,73°11'W,US/Eastern,13 +Wilmington,USA,39°44'N,75°32'W,US/Eastern,15 +Jacksonville,USA,30°19'N,81°39'W,US/Eastern,13 +Miami,USA,26°8'N,80°12'W,US/Eastern,10 +Chicago,USA,41°50'N,87°41'W,US/Central,189 +Wichita,USA,37°41'N,97°20'W,US/Central,399 +Louisville,USA,38°15'N,85°45'W,US/Eastern,142 +New Orleans,USA,29°57'N,90°4'W,US/Central,10 +Portland,USA,43°39'N,70°16'W,US/Eastern,6 +Baltimore,USA,39°17'N,76°37'W,US/Eastern,31 +Detroit,USA,42°19'N,83°2'W,US/Eastern,189 +Minneapolis,USA,44°58'N,93°15'W,US/Central,260 +Kansas City,USA,39°06'N,94°35'W,US/Central,256 +Billings,USA,45°47'N,108°32'W,US/Mountain,946 +Omaha,USA,41°15'N,96°0'W,US/Central,299 +Las Vegas,USA,36°10'N,115°08'W,US/Pacific,720 +Manchester,USA,42°59'N,71°27'W,US/Eastern,56 +Newark,USA,40°44'N,74°11'W,US/Eastern,4 +Albuquerque,USA,35°06'N,106°36'W,US/Mountain,1523 +New York,USA,40°43'N,74°0'W,US/Eastern,17 +Charlotte,USA,35°13'N,80°50'W,US/Eastern,217 +Fargo,USA,46°52'N,96°47'W,US/Central,271 +Cleveland,USA,41°28'N,81°40'W,US/Eastern,210 +Philadelphia,USA,39°57'N,75°10'W,US/Eastern,62 +Sioux Falls,USA,43°32'N,96°43'W,US/Central,443 +Memphis,USA,35°07'N,89°58'W,US/Central,84 +Houston,USA,29°45'N,95°22'W,US/Central,8 +Dallas,USA,32°47'N,96°48'W,US/Central,137 +Burlington,USA,44°28'N,73°9'W,US/Eastern,35 +Virginia Beach,USA,36°50'N,76°05'W,US/Eastern,9 
+Seattle,USA,47°36'N,122°19'W,US/Pacific,63 +Milwaukee,USA,43°03'N,87°57'W,US/Central,188 +San Diego,USA,32°42'N,117°09'W,US/Pacific,16 +Orlando,USA,28°32'N,81°22'W,US/Eastern,35 +Buffalo,USA,42°54'N,78°50'W,US/Eastern,188 +Toledo,USA,41°39'N,83°34'W,US/Eastern,180 + +# Canadian cities +Vancouver,Canada,49°15'N,123°6'W,America/Vancouver,55 +Calgary,Canada,51°2'N,114°3'W,America/Edmonton,1040 +Edmonton,Canada,53°32'N,113°29'W,America/Edmonton,664 +Saskatoon,Canada,52°8'N,106°40'W,America/Regina,480 +Regina,Canada,50°27'N,104°36'W,America/Regina,577 +Winnipeg,Canada,49°53'N,97°8'W,America/Winnipeg,229 +Toronto,Canada,43°39'N,79°22'W,America/Toronto,77 +Montreal,Canada,45°30'N,73°33'W,America/Montreal,23 +Quebec,Canada,46°48'N,71°14'W,America/Toronto,87 +Fredericton,Canada,45°57'N,66°38'W,America/Halifax,8 +Halifax,Canada,44°38'N,63°34'W,America/Halifax,36 +Charlottetown,Canada,46°14'N,63°7'W,America/Halifax,2 +St. John's,Canada,47°33'N,52°42'W,America/Halifax,116 +Whitehorse,Canada,60°43'N,135°3'W,America/Whitehorse,696 +Yellowknife,Canada,62°27'N,114°22'W,America/Yellowknife,191 +Iqaluit,Canada,63°44'N,68°31'W,America/Iqaluit,3 +""" + +class AstralError(Exception): + pass + +class Location(object): + """Provides access to information for single location.""" + + def __init__(self, info=None): + """Initializes the object with a tuple of information. + + :param info: A tuple of information to fill in the location info. 
+ + The tuple should contain items in the following order + + ================ ============= + Field Default + ================ ============= + name Greenwich + region England + latitude 51.168 + longitude 0 + time zone name Europe/London + elevation 24 + ================ ============= + + See :attr:`timezone` property for a method of obtaining time zone + names + """ + + self.astral = None + if info is None: + self.name = 'Greenwich' + self.region = 'England' + self._latitude = 51.168 + self._longitude = 0.0 + self._timezone_group = 'Europe' + self._timezone_location = 'London' + self._elevation = 24 + else: + self.name = '' + self.region = '' + self._latitude = 0.0 + self._longitude = 0.0 + self._timezone_group = '' + self._timezone_location = '' + self._elevation = 0 + + try: + self.name = info[0] + self.region = info[1] + self.latitude = info[2] + self.longitude = info[3] + self.timezone = info[4] + self.elevation = info[5] + except IndexError: + pass + + self.url = '' + + def __repr__(self): + repr_format = '%s/%s, tz=%s, lat=%0.02f, lon=%0.02f' + return repr_format % (self.name, self.region, + self.timezone, + self.latitude, self.longitude) + + @property + def latitude(self): + """The location's latitude + + ``latitude`` can be set either as a string or as a number + + For strings they must be of the form + + degrees°minutes'[N|S] e.g. 51°31'N + + For numbers, positive numbers signify latitudes to the North. + """ + + return self._latitude + + @latitude.setter + def latitude(self, latitude): + if isinstance(latitude, str) or isinstance(latitude, ustr): + (deg, rest) = latitude.split("°", 1) + (minute, rest) = rest.split("'", 1) + + self._latitude = float(deg) + (float(minute) / 60) + + if latitude.endswith("S"): + self._latitude = -self._latitude + else: + self._latitude = float(latitude) + + @property + def longitude(self): + """The location's longitude. 
+ + ``longitude`` can be set either as a string or as a number + + For strings they must be of the form + + degrees°minutes'[E|W] e.g. 51°31'W + + For numbers, positive numbers signify longitudes to the East. + """ + + return self._longitude + + @longitude.setter + def longitude(self, longitude): + if isinstance(longitude, str) or isinstance(longitude, ustr): + (deg, rest) = longitude.split("°", 1) + (minute, rest) = rest.split("'", 1) + + self._longitude = float(deg) + (float(minute) / 60) + + if longitude.endswith("W"): + self._longitude = -self._longitude + else: + self._longitude = float(longitude) + + @property + def elevation(self): + """The elevation in metres above sea level.""" + + return self._elevation + + @elevation.setter + def elevation(self, elevation): + self._elevation = int(elevation) + + @property + def timezone(self): + """The name of the time zone for the location. + + A list of time zone names can be obtained from pytz. For example. + + >>> from pytz import all_timezones + >>> for timezone in all_timezones: + ... print(timezone) + """ + + if self._timezone_location != '': + return '%s/%s' % (self._timezone_group, + self._timezone_location) + else: + return self._timezone_group + + @timezone.setter + def timezone(self, name): + if name not in pytz.all_timezones: + raise ValueError('Timezone \'%s\' not recognized' % name) + + try: + self._timezone_group, self._timezone_location = \ + name.split('/', 1) + except ValueError: + self._timezone_group = name + self._timezone_location = '' + + @property + def tz(self): + """Time zone information.""" + + try: + tz = pytz.timezone(self.timezone) + return tz + except pytz.UnknownTimeZoneError: + raise AstralError('Unknown timezone \'%s\'' % self.timezone) + + tzinfo = tz + + @property + def solar_depression(self): + """The number of degrees the sun must be below the horizon for the + dawn/dusk calculation. 
+ + Can either be set as a number of degrees below the horizon or as + one of the following strings + + ============= ======= + String Degrees + ============= ======= + civil 6.0 + nautical 12.0 + astronomical 18.0 + ============= ======= + """ + + return self.astral.solar_depression + + @solar_depression.setter + def solar_depression(self, depression): + if self.astral is None: + self.astral = Astral() + + self.astral.solar_depression = depression + + def sun(self, date=None, local=True): + """Returns dawn, sunrise, noon, sunset and dusk as a dictionary. + + :param date: The date for which to calculate the times. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :returns: Dictionary with keys ``dawn``, ``sunrise``, ``noon``, + ``sunset`` and ``dusk`` whose values are the results of the + corresponding methods. + :rtype: dict + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + sun = self.astral.sun_utc(date, self.latitude, self.longitude) + + if local: + for key, dt in sun.items(): + sun[key] = dt.astimezone(self.tz) + + return sun + + def dawn(self, date=None, local=True): + """Calculates the time in the morning when the sun is a certain number + of degrees below the horizon. By default this is 6 degrees but can be + changed by setting the :attr:`Astral.solar_depression` property. + + :param date: The date for which to calculate the dawn time. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :returns: The date and time at which dawn occurs. 
+ :rtype: :class:`~datetime.datetime` + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + dawn = self.astral.dawn_utc(date, self.latitude, self.longitude) + + if local: + return dawn.astimezone(self.tz) + else: + return dawn + + def sunrise(self, date=None, local=True): + """Return sunrise time. + + Calculates the time in the morning when the sun is a 0.833 degrees + below the horizon. This is to account for refraction. + + :param date: The date for which to calculate the sunrise time. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :returns: The date and time at which sunrise occurs. + :rtype: :class:`~datetime.datetime` + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + sunrise = self.astral.sunrise_utc(date, self.latitude, self.longitude) + + if local: + return sunrise.astimezone(self.tz) + else: + return sunrise + + def solar_noon(self, date=None, local=True): + """Calculates the solar noon (the time when the sun is at its highest + point.) + + :param date: The date for which to calculate the noon time. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :returns: The date and time at which the solar noon occurs. 
+ :rtype: :class:`~datetime.datetime` + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + noon = self.astral.solar_noon_utc(date, self.longitude) + + if local: + return noon.astimezone(self.tz) + else: + return noon + + def sunset(self, date=None, local=True): + """Calculates sunset time (the time in the evening when the sun is a + 0.833 degrees below the horizon. This is to account for refraction.) + + :param date: The date for which to calculate the sunset time. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :returns: The date and time at which sunset occurs. + :rtype: :class:`~datetime.datetime` + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + sunset = self.astral.sunset_utc(date, self.latitude, self.longitude) + + if local: + return sunset.astimezone(self.tz) + else: + return sunset + + def dusk(self, date=None, local=True): + """Calculates the dusk time (the time in the evening when the sun is a + certain number of degrees below the horizon. By default this is 6 + degrees but can be changed by setting the + :attr:`solar_depression` property.) + + :param date: The date for which to calculate the dusk time. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :returns: The date and time at which dusk occurs. 
+ :rtype: :class:`~datetime.datetime` + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + dusk = self.astral.dusk_utc(date, self.latitude, self.longitude) + + if local: + return dusk.astimezone(self.tz) + else: + return dusk + + def daylight(self, date=None, local=True): + """Calculates the daylight time (the time between sunrise and sunset) + + :param date: The date for which to calculate daylight. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :returns: A tuple containing the start and end times + :rtype: tuple(:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + start, end = self.astral.daylight_utc(date, self.latitude, self.longitude) + + if local: + return start.astimezone(self.tz), end.astimezone(self.tz) + else: + return start, end + + def night(self, date=None, local=True): + """Calculates the night time (the time between astronomical dusk and + astronomical dawn of the next day) + + :param date: The date for which to calculate the start of the night time. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. 
+ If not specified then the time will be returned in local time + + :returns: A tuple containing the start and end times + :rtype: tuple(:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + start, end = self.astral.night_utc(date, self.latitude, self.longitude) + + if local: + return start.astimezone(self.tz), end.astimezone(self.tz) + else: + return start, end + + def twilight(self, direction=SUN_RISING, date=None, local=True): + """Returns the start and end times of Twilight in the UTC timezone when + the sun is traversing in the specified direction. + + This method defines twilight as being between the time + when the sun is at -6 degrees and sunrise/sunset. + + :param direction: Determines whether the time is for the sun rising or setting. + Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. + :type direction: int + :param date: The date for which to calculate the times. + :type date: :class:`datetime.date` + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :return: A tuple of the UTC date and time at which twilight starts and ends. + :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + if date is None: + date = datetime.date.today() + + start, end = self.astral.twilight_utc(date, direction, + self.latitude, self.longitude) + + if local: + return start.astimezone(self.tz), end.astimezone(self.tz) + else: + return start, end + + def time_at_elevation(self, elevation, direction=SUN_RISING, date=None, local=True): + """Calculate the time when the sun is at the specified elevation. + + Note: + This method uses positive elevations for those above the horizon. + + Elevations greater than 90 degrees are converted to a setting sun + i.e. an elevation of 110 will calculate a setting sun at 70 degrees. 
+ + :param elevation: Elevation in degrees above the horizon to calculate for. + :type elevation: float + :param direction: Determines whether the time is for the sun rising or setting. + Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising. + :type direction: int + :param date: The date for which to calculate the elevation time. + If no date is specified then the current date will be used. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + If not specified then the time will be returned in local time + + :returns: The date and time at which dusk occurs. + :rtype: :class:`~datetime.datetime` + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + if elevation > 90.0: + elevation = 180.0 - elevation + direction = SUN_SETTING + + time_ = self.astral.time_at_elevation_utc(elevation, direction, date, self.latitude, self.longitude) + + if local: + return time_.astimezone(self.tz) + else: + return time_ + + def rahukaalam(self, date=None, local=True): + """Calculates the period of rahukaalam. + + :param date: The date for which to calculate the rahukaalam period. + A value of ``None`` uses the current date. + + :param local: True = Time to be returned in location's time zone; + False = Time to be returned in UTC. + + :return: Tuple containing the start and end times for Rahukaalam. + :rtype: tuple + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + rahukaalam = self.astral.rahukaalam_utc(date, + self.latitude, self.longitude) + + if local: + rahukaalam = (rahukaalam[0].astimezone(self.tz), + rahukaalam[1].astimezone(self.tz)) + + return rahukaalam + + def golden_hour(self, direction=SUN_RISING, date=None, local=True): + """Returns the start and end times of the Golden Hour when the sun is traversing + in the specified direction. 
+ + This method uses the definition from PhotoPills i.e. the + golden hour is when the sun is between 4 degrees below the horizon + and 6 degrees above. + + :param direction: Determines whether the time is for the sun rising or setting. + Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising. + :type direction: int + :param date: The date for which to calculate the times. + :type date: :class:`datetime.date` + :param local: True = Times to be returned in location's time zone; + False = Times to be returned in UTC. + If not specified then the time will be returned in local time + + :return: A tuple of the date and time at which the Golden Hour starts and ends. + :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + start, end = self.astral.golden_hour_utc(direction, date, + self.latitude, self.longitude) + + if local: + start = start.astimezone(self.tz) + end = end.astimezone(self.tz) + + return start, end + + def blue_hour(self, direction=SUN_RISING, date=None, local=True): + """Returns the start and end times of the Blue Hour when the sun is traversing + in the specified direction. + + This method uses the definition from PhotoPills i.e. the + blue hour is when the sun is between 6 and 4 degrees below the horizon. + + :param direction: Determines whether the time is for the sun rising or setting. + Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising. + :type direction: int + :param date: The date for which to calculate the times. + If no date is specified then the current date will be used. + + :param local: True = Times to be returned in location's time zone; + False = Times to be returned in UTC. + If not specified then the time will be returned in local time + + :return: A tuple of the date and time at which the Blue Hour starts and ends. 
+ :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + start, end = self.astral.blue_hour_utc(direction, date, + self.latitude, self.longitude) + + if local: + start = start.astimezone(self.tz) + end = end.astimezone(self.tz) + + return start, end + + def solar_azimuth(self, dateandtime=None): + """Calculates the solar azimuth angle for a specific date/time. + + :param dateandtime: The date and time for which to calculate the angle. + :type dateandtime: :class:`~datetime.datetime` + + :returns: The azimuth angle in degrees clockwise from North. + :rtype: float + """ + + if self.astral is None: + self.astral = Astral() + + if dateandtime is None: + dateandtime = datetime.datetime.now(self.tz) + elif not dateandtime.tzinfo: + dateandtime = self.tz.localize(dateandtime) + + dateandtime = dateandtime.astimezone(pytz.UTC) + + return self.astral.solar_azimuth(dateandtime, + self.latitude, self.longitude) + + def solar_elevation(self, dateandtime=None): + """Calculates the solar elevation angle for a specific time. + + :param dateandtime: The date and time for which to calculate the angle. + :type dateandtime: :class:`~datetime.datetime` + + :returns: The elevation angle in degrees above the horizon. + :rtype: float + """ + + if self.astral is None: + self.astral = Astral() + + if dateandtime is None: + dateandtime = datetime.datetime.now(self.tz) + elif not dateandtime.tzinfo: + dateandtime = self.tz.localize(dateandtime) + + dateandtime = dateandtime.astimezone(pytz.UTC) + + return self.astral.solar_elevation(dateandtime, + self.latitude, self.longitude) + + def solar_zenith(self, dateandtime=None): + """Calculates the solar zenith angle for a specific time. + + :param dateandtime: The date and time for which to calculate the angle. + :type dateandtime: :class:`~datetime.datetime` + + :returns: The zenith angle in degrees above the horizon. 
+ :rtype: float + """ + + return self.solar_elevation(dateandtime) + + def moon_phase(self, date=None): + """Calculates the moon phase for a specific date. + + :param date: The date to calculate the phase for. + If ommitted the current date is used. + :type date: :class:`datetime.date` + + :returns: + A number designating the phase + + | 0 = New moon + | 7 = First quarter + | 14 = Full moon + | 21 = Last quarter + :rtype: int + """ + + if self.astral is None: + self.astral = Astral() + + if date is None: + date = datetime.date.today() + + return self.astral.moon_phase(date) + + +class LocationGroup(object): + def __init__(self, name): + self.name = name + self._locations = {} + + def __getitem__(self, key): + """Returns a Location object for the specified `key`. + + group = astral.europe + location = group['London'] + + You can supply an optional region name by adding a comma + followed by the region name. Where multiple locations have the + same name you may need to supply the region name otherwise + the first result will be returned which may not be the one + you're looking for. + + location = group['Abu Dhabi,United Arab Emirates'] + + Handles location names with spaces and mixed case. 
+ """ + + key = self._sanitize_key(key) + + try: + lookup_name, lookup_region = key.split(',', 1) + except ValueError: + lookup_name = key + lookup_region = '' + + lookup_name = lookup_name.strip('"\'') + lookup_region = lookup_region.strip('"\'') + + for (location_name, location_list) in self._locations.items(): + if location_name == lookup_name: + if lookup_region == '': + return location_list[0] + + for location in location_list: + if self._sanitize_key(location.region) == lookup_region: + return location + + raise KeyError('Unrecognised location name - %s' % key) + + def __setitem__(self, key, value): + key = self._sanitize_key(key) + if key not in self._locations: + self._locations[key] = [value] + else: + self._locations[key].append(value) + + def __contains__(self, key): + key = self._sanitize_key(key) + for name in self._locations.keys(): + if name == key: + return True + + return False + + def __iter__(self): + for location_list in self._locations.values(): + for location in location_list: + yield location + + def keys(self): + return self._locations.keys() + + def values(self): + return self._locations.values() + + def items(self): + return self._locations.items() + + @property + def locations(self): + k = [] + for location_list in self._locations.values(): + for location in location_list: + k.append(location.name) + + return k + + def _sanitize_key(self, key): + return str(key).lower().replace(' ', '_') + + +class AstralGeocoder(object): + """Looks up geographic information from the locations stored within the + module + """ + + def __init__(self): + self._groups = {} + + locations = _LOCATION_INFO.split('\n') + for line in locations: + line = line.strip() + if line != '' and line[0] != '#': + if line[-1] == '\n': + line = line[:-1] + + info = line.split(',') + + l = Location(info) + + key = l._timezone_group.lower() + try: + group = self.__getattr__(key) + except AttributeError: + group = LocationGroup(l._timezone_group) + self._groups[key] = group + + 
group[info[0].lower()] = l + + def __getattr__(self, key): + """Access to each timezone group. For example London is in timezone + group Europe. + + Attribute lookup is case insensitive""" + + key = str(key).lower() + for name, value in self._groups.items(): + if name == key: + return value + + raise AttributeError('Group \'%s\' not found' % key) + + def __getitem__(self, key): + """Lookup a location within all timezone groups. + + Item lookup is case insensitive.""" + + key = str(key).lower() + for group in self._groups.values(): + try: + return group[key] + except KeyError: + pass + + raise KeyError('Unrecognised location name - %s' % key) + + def __iter__(self): + return self._groups.__iter__() + + def __contains__(self, key): + key = str(key).lower() + for name, group in self._groups.items(): + if name == key: + return True + + if key in group: + return True + + return False + + @property + def locations(self): + k = [] + for group in self._groups.values(): + k.extend(group.locations) + + return k + + @property + def groups(self): + return self._groups + + +class GoogleGeocoder(object): + """Use Google Maps API Web Service to lookup GPS co-ordinates, timezone and + elevation. + + See the following for more info. 
+ https://developers.google.com/maps/documentation/ + """ + + def __init__(self, cache=False): + self.cache = cache + self.geocache = {} + self._location_query_base = 'http://maps.googleapis.com/maps/api/geocode/json?address=%s&sensor=false' + self._timezone_query_base = 'https://maps.googleapis.com/maps/api/timezone/json?location=%f,%f×tamp=%d&sensor=false' + self._elevation_query_base = 'http://maps.googleapis.com/maps/api/elevation/json?locations=%f,%f&sensor=false' + + def __getitem__(self, key): + if self.cache and key in self.geocache: + return self.geocache[key] + + location = Location() + try: + self._get_geocoding(key, location) + self._get_timezone(location) + self._get_elevation(location) + except URLError: + raise AstralError(('GoogleGeocoder: Unable to contact ' + 'Google maps API')) + + url = 'http://maps.google.com/maps?q=loc:%f,%f' + location.url = url % (location.latitude, location.longitude) + + if self.cache: + self.geocache[key] = location + + return location + + def _get_geocoding(self, key, location): + """Lookup the Google geocoding API information for `key`""" + + url = self._location_query_base % quote_plus(key) + data = self._read_from_url(url) + response = json.loads(data) + if response['status'] == 'OK': + formatted_address = response['results'][0]['formatted_address'] + pos = formatted_address.find(',') + if pos == -1: + location.name = formatted_address + location.region = '' + else: + location.name = formatted_address[:pos].strip() + location.region = formatted_address[pos + 1:].strip() + + l = response['results'][0]['geometry']['location'] + location.latitude = float(l['lat']) + location.longitude = float(l['lng']) + else: + raise AstralError('GoogleGeocoder: Unable to locate %s' % key) + + def _get_timezone(self, location): + """Query the timezone information with the latitude and longitude of + the specified `location`. 
+ + This function assumes the timezone of the location has always been + the same as it is now by using time() in the query string. + """ + + url = self._timezone_query_base % (location.latitude, + location.longitude, + int(time())) + data = self._read_from_url(url) + response = json.loads(data) + if response['status'] == 'OK': + location.timezone = response['timeZoneId'] + else: + location.timezone = 'UTC' + + def _get_elevation(self, location): + """Query the elevation information with the latitude and longitude of + the specified `location`. + """ + + url = self._elevation_query_base % (location.latitude, + location.longitude) + data = self._read_from_url(url) + response = json.loads(data) + if response['status'] == 'OK': + location.elevation = int(float(response['results'][0]['elevation'])) + else: + location.elevation = 0 + + def _read_from_url(self, url): + ds = urlopen(url) + content_types = ds.headers.get('Content-Type').split(';') + + charset = 'UTF-8' + for ct in content_types: + if ct.strip().startswith('charset'): + charset = ct.split('=')[1] + + data = ds.read().decode(charset) + ds.close() + + return data + + +class Astral(object): + def __init__(self, geocoder=AstralGeocoder): + """Initialise the geocoder and set the default depression.""" + + self.geocoder = geocoder() + self._depression = 6 # Set default depression in degrees + + def __getitem__(self, key): + """Returns the Location instance specified by ``key``.""" + + location = self.geocoder[key] + location.astral = self + return location + + @property + def solar_depression(self): + """The number of degrees the sun must be below the horizon for the + dawn/dusk calculation. 
+ + Can either be set as a number of degrees below the horizon or as + one of the following strings + + ============= ======= + String Degrees + ============= ======= + civil 6.0 + nautical 12.0 + astronomical 18.0 + ============= ======= + """ + + return self._depression + + @solar_depression.setter + def solar_depression(self, depression): + if isinstance(depression, str) or isinstance(depression, ustr): + try: + self._depression = { + 'civil': 6, + 'nautical': 12, + 'astronomical': 18}[depression] + except: + raise KeyError(("solar_depression must be either a number " + "or one of 'civil', 'nautical' or " + "'astronomical'")) + else: + self._depression = float(depression) + + def sun_utc(self, date, latitude, longitude): + """Calculate all the info for the sun at once. + All times are returned in the UTC timezone. + + :param date: Date to calculate for. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :returns: Dictionary with keys ``dawn``, ``sunrise``, ``noon``, + ``sunset`` and ``dusk`` whose values are the results of the + corresponding `_utc` methods. + :rtype: dict + """ + + dawn = self.dawn_utc(date, latitude, longitude) + sunrise = self.sunrise_utc(date, latitude, longitude) + noon = self.solar_noon_utc(date, longitude) + sunset = self.sunset_utc(date, latitude, longitude) + dusk = self.dusk_utc(date, latitude, longitude) + + return { + 'dawn': dawn, + 'sunrise': sunrise, + 'noon': noon, + 'sunset': sunset, + 'dusk': dusk + } + + def dawn_utc(self, date, latitude, longitude, depression=0): + """Calculate dawn time in the UTC timezone. + + :param date: Date to calculate for. 
+ :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + :param depression: Override the depression used + :type depression: float + + :return: The UTC date and time at which dawn occurs. + :rtype: :class:`~datetime.datetime` + """ + + if depression == 0: + depression = self._depression + depression += 90 + + try: + return self._calc_time(depression, SUN_RISING, date, latitude, longitude) + except: + raise AstralError(('Sun never reaches %d degrees below the horizon, ' + 'at this location.') % (depression - 90)) + + def sunrise_utc(self, date, latitude, longitude): + """Calculate sunrise time in the UTC timezone. + + :param date: Date to calculate for. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: The UTC date and time at which sunrise occurs. + :rtype: :class:`~datetime.datetime` + """ + + try: + return self._calc_time(90 + 0.833, SUN_RISING, date, latitude, longitude) + except: + raise AstralError(('Sun remains below the horizon on this day, ' + 'at this location.')) + + def solar_noon_utc(self, date, longitude): + """Calculate solar noon time in the UTC timezone. + + :param date: Date to calculate for. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: The UTC date and time at which noon occurs. 
+ :rtype: :class:`~datetime.datetime` + """ + + julianday = self._julianday(date) + + newt = self._jday_to_jcentury(julianday + 0.5 + -longitude / 360.0) + + eqtime = self._eq_of_time(newt) + timeUTC = 720.0 + (-longitude * 4.0) - eqtime + + timeUTC = timeUTC / 60.0 + hour = int(timeUTC) + minute = int((timeUTC - hour) * 60) + second = int((((timeUTC - hour) * 60) - minute) * 60) + + if second > 59: + second -= 60 + minute += 1 + elif second < 0: + second += 60 + minute -= 1 + + if minute > 59: + minute -= 60 + hour += 1 + elif minute < 0: + minute += 60 + hour -= 1 + + if hour > 23: + hour -= 24 + date += datetime.timedelta(days=1) + elif hour < 0: + hour += 24 + date -= datetime.timedelta(days=1) + + noon = datetime.datetime(date.year, date.month, date.day, + hour, minute, second) + noon = pytz.UTC.localize(noon) + + return noon + + def sunset_utc(self, date, latitude, longitude): + """Calculate sunset time in the UTC timezone. + + :param date: Date to calculate for. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: The UTC date and time at which sunset occurs. + :rtype: :class:`~datetime.datetime` + """ + + try: + return self._calc_time(90 + 0.833, SUN_SETTING, date, latitude, longitude) + except: + raise AstralError(('Sun remains above the horizon on this day, ' + 'at this location.')) + + def dusk_utc(self, date, latitude, longitude, depression=0): + """Calculate dusk time in the UTC timezone. + + :param date: Date to calculate for. 
+ :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + :param depression: Override the depression used + :type depression: float + + :return: The UTC date and time at which dusk occurs. + :rtype: :class:`~datetime.datetime` + """ + + if depression == 0: + depression = self._depression + depression += 90 + + try: + return self._calc_time(depression, SUN_SETTING, date, latitude, longitude) + except: + raise AstralError(('Sun never reaches %d degrees below the horizon, ' + 'at this location.') % (depression - 90)) + + def daylight_utc(self, date, latitude, longitude): + """Calculate daylight start and end times in the UTC timezone. + + :param date: Date to calculate for. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: A tuple of the UTC date and time at which daylight starts and ends. + :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + start = self.sunrise_utc(date, latitude, longitude) + end = self.sunset_utc(date, latitude, longitude) + + return start, end + + def night_utc(self, date, latitude, longitude): + """Calculate night start and end times in the UTC timezone. + + Night is calculated to be between astronomical dusk on the + date specified and astronomical dawn of the next day. + + :param date: Date to calculate for. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: A tuple of the UTC date and time at which night starts and ends. 
+ :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + start = self.dusk_utc(date, latitude, longitude, 18) + tomorrow = date + datetime.timedelta(days=1) + end = self.dawn_utc(tomorrow, latitude, longitude, 18) + + return start, end + + def twilight_utc(self, direction, date, latitude, longitude): + """Returns the start and end times of Twilight in the UTC timezone when + the sun is traversing in the specified direction. + + This method defines twilight as being between the time + when the sun is at -6 degrees and sunrise/sunset. + + :param direction: Determines whether the time is for the sun rising or setting. + Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. + :type direction: int + :param date: The date for which to calculate the times. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: A tuple of the UTC date and time at which twilight starts and ends. + :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + if date is None: + date = datetime.date.today() + + start = self.time_at_elevation_utc(-6, direction, date, latitude, longitude) + if direction == SUN_RISING: + end = self.sunrise_utc(date, latitude, longitude) + else: + end = self.sunset_utc(date, latitude, longitude) + + if direction == SUN_RISING: + return start, end + else: + return end, start + + def golden_hour_utc(self, direction, date, latitude, longitude): + """Returns the start and end times of the Golden Hour in the UTC timezone + when the sun is traversing in the specified direction. + + This method uses the definition from PhotoPills i.e. the + golden hour is when the sun is between 4 degrees below the horizon + and 6 degrees above. + + :param direction: Determines whether the time is for the sun rising or setting. 
+ Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. + :type direction: int + :param date: The date for which to calculate the times. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: A tuple of the UTC date and time at which the Golden Hour starts and ends. + :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + if date is None: + date = datetime.date.today() + + start = self.time_at_elevation_utc(-4, direction, date, + latitude, longitude) + end = self.time_at_elevation_utc(6, direction, date, + latitude, longitude) + + if direction == SUN_RISING: + return start, end + else: + return end, start + + def blue_hour_utc(self, direction, date, latitude, longitude): + """Returns the start and end times of the Blue Hour in the UTC timezone + when the sun is traversing in the specified direction. + + This method uses the definition from PhotoPills i.e. the + blue hour is when the sun is between 6 and 4 degrees below the horizon. + + :param direction: Determines whether the time is for the sun rising or setting. + Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. + :type direction: int + :param date: The date for which to calculate the times. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: A tuple of the UTC date and time at which the Blue Hour starts and ends. 
+ :rtype: (:class:`~datetime.datetime`, :class:`~datetime.datetime`) + """ + + if date is None: + date = datetime.date.today() + + start = self.time_at_elevation_utc(-6, direction, date, + latitude, longitude) + end = self.time_at_elevation_utc(-4, direction, date, + latitude, longitude) + + if direction == SUN_RISING: + return start, end + else: + return end, start + + def time_at_elevation_utc(self, elevation, direction, date, latitude, longitude): + """Calculate the time in the UTC timezone when the sun is at + the specified elevation on the specified date. + + Note: This method uses positive elevations for those above the horizon. + + :param elevation: Elevation in degrees above the horizon to calculate for. + :type elevation: float + :param direction: Determines whether the calculated time is for the sun rising or setting. + Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising. + :type direction: int + :param date: Date to calculate for. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: The UTC date and time at which the sun is at the required + elevation. + :rtype: :class:`~datetime.datetime` + """ + + if elevation > 90.0: + elevation = 180.0 - elevation + direction = SUN_SETTING + + depression = 90 - elevation + try: + return self._calc_time(depression, direction, date, latitude, longitude) + except Exception: + raise AstralError(('Sun never reaches an elevation of %d degrees' + 'at this location.') % elevation) + + def solar_azimuth(self, dateandtime, latitude, longitude): + """Calculate the azimuth angle of the sun. + + :param dateandtime: The date and time for which to calculate + the angle. 
+ :type dateandtime: :class:`~datetime.datetime` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: The azimuth angle in degrees clockwise from North. + :rtype: float + + If `dateandtime` is a naive Python datetime then it is assumed to be + in the UTC timezone. + """ + + if latitude > 89.8: + latitude = 89.8 + + if latitude < -89.8: + latitude = -89.8 + + if dateandtime.tzinfo is None: + zone = 0 + utc_datetime = dateandtime + else: + zone = -dateandtime.utcoffset().total_seconds() / 3600.0 + utc_datetime = dateandtime.astimezone(pytz.utc) + + timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + \ + (utc_datetime.second / 3600.0) + + JD = self._julianday(dateandtime) + t = self._jday_to_jcentury(JD + timenow / 24.0) + theta = self._sun_declination(t) + eqtime = self._eq_of_time(t) + solarDec = theta # in degrees + + solarTimeFix = eqtime - (4.0 * -longitude) + (60 * zone) + trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + \ + dateandtime.second / 60.0 + solarTimeFix + # in minutes + + while trueSolarTime > 1440: + trueSolarTime = trueSolarTime - 1440 + + hourangle = trueSolarTime / 4.0 - 180.0 + # Thanks to Louis Schwarzmayr for the next line: + if hourangle < -180: + hourangle = hourangle + 360.0 + + harad = radians(hourangle) + + csz = sin(radians(latitude)) * sin(radians(solarDec)) + \ + cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad) + + if csz > 1.0: + csz = 1.0 + elif csz < -1.0: + csz = -1.0 + + zenith = degrees(acos(csz)) + + azDenom = (cos(radians(latitude)) * sin(radians(zenith))) + + if (abs(azDenom) > 0.001): + azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - + sin(radians(solarDec))) / azDenom + + if abs(azRad) > 1.0: + if azRad < 0: + azRad = -1.0 + else: + azRad = 1.0 + + azimuth = 180.0 - degrees(acos(azRad)) + + if hourangle > 0.0: + azimuth = -azimuth + 
else: + if latitude > 0.0: + azimuth = 180.0 + else: + azimuth = 0.0 + + if azimuth < 0.0: + azimuth = azimuth + 360.0 + + return azimuth + + def solar_elevation(self, dateandtime, latitude, longitude): + """Calculate the elevation angle of the sun. + + :param dateandtime: The date and time for which to calculate + the angle. + :type dateandtime: :class:`~datetime.datetime` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: The elevation angle in degrees above the horizon. + :rtype: float + + If `dateandtime` is a naive Python datetime then it is assumed to be + in the UTC timezone. + """ + + if latitude > 89.8: + latitude = 89.8 + + if latitude < -89.8: + latitude = -89.8 + + if dateandtime.tzinfo is None: + zone = 0 + utc_datetime = dateandtime + else: + zone = -dateandtime.utcoffset().total_seconds() / 3600.0 + utc_datetime = dateandtime.astimezone(pytz.utc) + + timenow = utc_datetime.hour + (utc_datetime.minute / 60.0) + \ + (utc_datetime.second / 3600) + + JD = self._julianday(dateandtime) + t = self._jday_to_jcentury(JD + timenow / 24.0) + theta = self._sun_declination(t) + eqtime = self._eq_of_time(t) + solarDec = theta # in degrees + + solarTimeFix = eqtime - (4.0 * -longitude) + (60 * zone) + trueSolarTime = dateandtime.hour * 60.0 + dateandtime.minute + \ + dateandtime.second / 60.0 + solarTimeFix + # in minutes + + while trueSolarTime > 1440: + trueSolarTime = trueSolarTime - 1440 + + hourangle = trueSolarTime / 4.0 - 180.0 + # Thanks to Louis Schwarzmayr for the next line: + if hourangle < -180: + hourangle = hourangle + 360.0 + + harad = radians(hourangle) + + csz = sin(radians(latitude)) * sin(radians(solarDec)) + \ + cos(radians(latitude)) * cos(radians(solarDec)) * cos(harad) + + if csz > 1.0: + csz = 1.0 + elif csz < -1.0: + csz = -1.0 + + zenith = degrees(acos(csz)) + + azDenom = 
(cos(radians(latitude)) * sin(radians(zenith))) + + if (abs(azDenom) > 0.001): + azRad = ((sin(radians(latitude)) * cos(radians(zenith))) - + sin(radians(solarDec))) / azDenom + + if abs(azRad) > 1.0: + if azRad < 0: + azRad = -1.0 + else: + azRad = 1.0 + + azimuth = 180.0 - degrees(acos(azRad)) + + if hourangle > 0.0: + azimuth = -azimuth + else: + if latitude > 0.0: + azimuth = 180.0 + else: + azimuth = 0.0 + + if azimuth < 0.0: + azimuth = azimuth + 360.0 + + exoatmElevation = 90.0 - zenith + + if exoatmElevation > 85.0: + refractionCorrection = 0.0 + else: + te = tan(radians(exoatmElevation)) + if exoatmElevation > 5.0: + refractionCorrection = 58.1 / te - 0.07 / (te * te * te) + \ + 0.000086 / (te * te * te * te * te) + elif exoatmElevation > -0.575: + step1 = (-12.79 + exoatmElevation * 0.711) + step2 = (103.4 + exoatmElevation * (step1)) + step3 = (-518.2 + exoatmElevation * (step2)) + refractionCorrection = 1735.0 + exoatmElevation * (step3) + else: + refractionCorrection = -20.774 / te + + refractionCorrection = refractionCorrection / 3600.0 + + solarzen = zenith - refractionCorrection + + solarelevation = 90.0 - solarzen + + return solarelevation + + def solar_zenith(self, dateandtime, latitude, longitude): + """Calculates the solar zenith angle. + + :param dateandtime: The date and time for which to calculate + the angle. + :type dateandtime: :class:`~datetime.datetime` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: The zenith angle in degrees above the horizon. + :rtype: float + + If `dateandtime` is a naive Python datetime then it is assumed to be + in the UTC timezone. + """ + + return self.solar_elevation(dateandtime, latitude, longitude) + + def moon_phase(self, date): + """Calculates the phase of the moon on the specified date. + + :param date: The date to calculate the phase for. 
+ :type date: :class:`datetime.date` + + :return: + A number designating the phase + + | 0 = New moon + | 7 = First quarter + | 14 = Full moon + | 21 = Last quarter + :rtype: int + """ + + jd = self._julianday(date) + DT = pow((jd - 2382148), 2) / (41048480 * 86400) + T = (jd + DT - 2451545.0) / 36525 + T2 = pow(T, 2) + T3 = pow(T, 3) + D = 297.85 + (445267.1115 * T) - (0.0016300 * T2) + (T3 / 545868) + D = radians(self._proper_angle(D)) + M = 357.53 + (35999.0503 * T) + M = radians(self._proper_angle(M)) + M1 = 134.96 + (477198.8676 * T) + (0.0089970 * T2) + (T3 / 69699) + M1 = radians(self._proper_angle(M1)) + elong = degrees(D) + 6.29 * sin(M1) + elong -= 2.10 * sin(M) + elong += 1.27 * sin(2 * D - M1) + elong += 0.66 * sin(2 * D) + elong = self._proper_angle(elong) + elong = round(elong) + moon = ((elong + 6.43) / 360) * 28 + moon = floor(moon) + if moon == 28: + moon = 0 + + return moon + + def rahukaalam_utc(self, date, latitude, longitude): + """Calculate ruhakaalam times in the UTC timezone. + + :param date: Date to calculate for. + :type date: :class:`datetime.date` + :param latitude: Latitude - Northern latitudes should be positive + :type latitude: float + :param longitude: Longitude - Eastern longitudes should be positive + :type longitude: float + + :return: Tuple containing the start and end times for Rahukaalam. 
+ :rtype: tuple + """ + + if date is None: + date = datetime.date.today() + + try: + sunrise = self.sunrise_utc(date, latitude, longitude) + sunset = self.sunset_utc(date, latitude, longitude) + except: + raise AstralError(('Sun remains below the horizon on this day, ' + 'at this location.')) + + octant_duration = (sunset - sunrise) / 8 + + # Mo,Sa,Fr,We,Th,Tu,Su + octant_index = [1, 6, 4, 5, 3, 2, 7] + + weekday = date.weekday() + octant = octant_index[weekday] + + start = sunrise + (octant_duration * octant) + end = start + octant_duration + + return start, end + + def _proper_angle(self, value): + if value > 0.0: + value /= 360.0 + return (value - floor(value)) * 360.0 + else: + tmp = ceil(abs(value / 360.0)) + return value + tmp * 360.0 + + def _julianday(self, date, timezone=None): + day = date.day + month = date.month + year = date.year + + if timezone: + offset = timezone.localize(datetime.datetime(year, month, day)).utcoffset() + offset = offset.total_seconds() / 1440.0 + day += offset + 0.5 + + if month <= 2: + year = year - 1 + month = month + 12 + + A = floor(year / 100.0) + B = 2 - A + floor(A / 4.0) + + jd = floor(365.25 * (year + 4716)) + floor(30.6001 * (month + 1)) + \ + day - 1524.5 + if jd > 2299160.4999999: + jd += B + + return jd + + def _jday_to_jcentury(self, julianday): + return (julianday - 2451545.0) / 36525.0 + + def _jcentury_to_jday(self, juliancentury): + return (juliancentury * 36525.0) + 2451545.0 + + def _mean_obliquity_of_ecliptic(self, juliancentury): + seconds = 21.448 - juliancentury * \ + (46.815 + juliancentury * (0.00059 - juliancentury * (0.001813))) + return 23.0 + (26.0 + (seconds / 60.0)) / 60.0 + + def _obliquity_correction(self, juliancentury): + e0 = self._mean_obliquity_of_ecliptic(juliancentury) + + omega = 125.04 - 1934.136 * juliancentury + return e0 + 0.00256 * cos(radians(omega)) + + def _geom_mean_long_sun(self, juliancentury): + l0 = 280.46646 + \ + juliancentury * (36000.76983 + 0.0003032 * juliancentury) + 
return l0 % 360.0 + + def _eccentrilocation_earth_orbit(self, juliancentury): + return 0.016708634 - \ + juliancentury * (0.000042037 + 0.0000001267 * juliancentury) + + def _geom_mean_anomaly_sun(self, juliancentury): + return 357.52911 + \ + juliancentury * (35999.05029 - 0.0001537 * juliancentury) + + def _eq_of_time(self, juliancentury): + epsilon = self._obliquity_correction(juliancentury) + l0 = self._geom_mean_long_sun(juliancentury) + e = self._eccentrilocation_earth_orbit(juliancentury) + m = self._geom_mean_anomaly_sun(juliancentury) + + y = tan(radians(epsilon) / 2.0) + y = y * y + + sin2l0 = sin(2.0 * radians(l0)) + sinm = sin(radians(m)) + cos2l0 = cos(2.0 * radians(l0)) + sin4l0 = sin(4.0 * radians(l0)) + sin2m = sin(2.0 * radians(m)) + + Etime = y * sin2l0 - 2.0 * e * sinm + 4.0 * e * y * sinm * cos2l0 - \ + 0.5 * y * y * sin4l0 - 1.25 * e * e * sin2m + + return degrees(Etime) * 4.0 + + def _sun_eq_of_center(self, juliancentury): + m = self._geom_mean_anomaly_sun(juliancentury) + + mrad = radians(m) + sinm = sin(mrad) + sin2m = sin(mrad + mrad) + sin3m = sin(mrad + mrad + mrad) + + c = sinm * (1.914602 - juliancentury * \ + (0.004817 + 0.000014 * juliancentury)) + \ + sin2m * (0.019993 - 0.000101 * juliancentury) + sin3m * 0.000289 + + return c + + def _sun_true_long(self, juliancentury): + l0 = self._geom_mean_long_sun(juliancentury) + c = self._sun_eq_of_center(juliancentury) + + return l0 + c + + def _sun_apparent_long(self, juliancentury): + O = self._sun_true_long(juliancentury) + + omega = 125.04 - 1934.136 * juliancentury + return O - 0.00569 - 0.00478 * sin(radians(omega)) + + def _sun_declination(self, juliancentury): + e = self._obliquity_correction(juliancentury) + lambd = self._sun_apparent_long(juliancentury) + + sint = sin(radians(e)) * sin(radians(lambd)) + return degrees(asin(sint)) + + def _sun_rad_vector(self, juliancentury): + v = self._sun_true_anomoly(juliancentury) + e = self._eccentrilocation_earth_orbit(juliancentury) + + 
return (1.000001018 * (1 - e * e)) / (1 + e * cos(radians(v))) + + def _sun_rt_ascension(self, juliancentury): + e = self._obliquity_correction(juliancentury) + lambd = self._sun_apparent_long(juliancentury) + + tananum = (cos(radians(e)) * sin(radians(lambd))) + tanadenom = (cos(radians(lambd))) + + return degrees(atan2(tananum, tanadenom)) + + def _sun_true_anomoly(self, juliancentury): + m = self._geom_mean_anomaly_sun(juliancentury) + c = self._sun_eq_of_center(juliancentury) + + return m + c + + def _hour_angle(self, latitude, declination, depression): + latitude_rad = radians(latitude) + declination_rad = radians(declination) + depression_rad = radians(depression) + + n = cos(depression_rad) + d = cos(latitude_rad) * cos(declination_rad) + + t = tan(latitude_rad) * tan(declination_rad) + h = (n / d) - t + + HA = acos(h) + return HA + + def _calc_time(self, depression, direction, date, latitude, longitude): + julianday = self._julianday(date) + + if latitude > 89.8: + latitude = 89.8 + + if latitude < -89.8: + latitude = -89.8 + + t = self._jday_to_jcentury(julianday) + eqtime = self._eq_of_time(t) + solarDec = self._sun_declination(t) + + hourangle = -self._hour_angle(latitude, solarDec, 90 + 0.833) + + delta = -longitude - degrees(hourangle) + timeDiff = 4.0 * delta + timeUTC = 720.0 + timeDiff - eqtime + + newt = self._jday_to_jcentury(self._jcentury_to_jday(t) + + timeUTC / 1440.0) + eqtime = self._eq_of_time(newt) + solarDec = self._sun_declination(newt) + + hourangle = self._hour_angle(latitude, solarDec, depression) + if direction == SUN_SETTING: + hourangle = -hourangle + + delta = -longitude - degrees(hourangle) + timeDiff = 4 * delta + timeUTC = 720 + timeDiff - eqtime + + timeUTC = timeUTC / 60.0 + hour = int(timeUTC) + minute = int((timeUTC - hour) * 60) + second = int((((timeUTC - hour) * 60) - minute) * 60) + + if second > 59: + second -= 60 + minute += 1 + elif second < 0: + second += 60 + minute -= 1 + + if minute > 59: + minute -= 60 + hour += 
1 + elif minute < 0: + minute += 60 + hour -= 1 + + if hour > 23: + hour -= 24 + date += datetime.timedelta(days=1) + elif hour < 0: + hour += 24 + date -= datetime.timedelta(days=1) + + dt = datetime.datetime(date.year, date.month, date.day, + hour, minute, second) + dt = pytz.UTC.localize(dt) + + return dt diff --git a/deps/cherrypy/__init__.py b/deps/cherrypy/__init__.py new file mode 100644 index 00000000..e5a28e69 --- /dev/null +++ b/deps/cherrypy/__init__.py @@ -0,0 +1,375 @@ +"""CherryPy is a pythonic, object-oriented HTTP framework. + + +CherryPy consists of not one, but four separate API layers. + +The APPLICATION LAYER is the simplest. CherryPy applications are written as +a tree of classes and methods, where each branch in the tree corresponds to +a branch in the URL path. Each method is a 'page handler', which receives +GET and POST params as keyword arguments, and returns or yields the (HTML) +body of the response. The special method name 'index' is used for paths +that end in a slash, and the special method name 'default' is used to +handle multiple paths via a single handler. This layer also includes: + + * the 'exposed' attribute (and cherrypy.expose) + * cherrypy.quickstart() + * _cp_config attributes + * cherrypy.tools (including cherrypy.session) + * cherrypy.url() + +The ENVIRONMENT LAYER is used by developers at all levels. It provides +information about the current request and response, plus the application +and server environment, via a (default) set of top-level objects: + + * cherrypy.request + * cherrypy.response + * cherrypy.engine + * cherrypy.server + * cherrypy.tree + * cherrypy.config + * cherrypy.thread_data + * cherrypy.log + * cherrypy.HTTPError, NotFound, and HTTPRedirect + * cherrypy.lib + +The EXTENSION LAYER allows advanced users to construct and share their own +plugins. 
It consists of: + + * Hook API + * Tool API + * Toolbox API + * Dispatch API + * Config Namespace API + +Finally, there is the CORE LAYER, which uses the core API's to construct +the default components which are available at higher layers. You can think +of the default components as the 'reference implementation' for CherryPy. +Megaframeworks (and advanced users) may replace the default components +with customized or extended components. The core API's are: + + * Application API + * Engine API + * Request API + * Server API + * WSGI API + +These API's are described in the `CherryPy specification `_. +""" + +try: + import pkg_resources +except ImportError: + pass + +from cherrypy._cperror import HTTPError, HTTPRedirect, InternalRedirect # noqa +from cherrypy._cperror import NotFound, CherryPyException, TimeoutError # noqa + +from cherrypy import _cpdispatch as dispatch # noqa + +from cherrypy import _cptools +tools = _cptools.default_toolbox +Tool = _cptools.Tool + +from cherrypy import _cprequest +from cherrypy.lib import httputil as _httputil + +from cherrypy import _cptree +tree = _cptree.Tree() +from cherrypy._cptree import Application # noqa +from cherrypy import _cpwsgi as wsgi # noqa + +from cherrypy import process +try: + from cherrypy.process import win32 + engine = win32.Win32Bus() + engine.console_control_handler = win32.ConsoleCtrlHandler(engine) + del win32 +except ImportError: + engine = process.bus + + +try: + __version__ = pkg_resources.require('cherrypy')[0].version +except Exception: + __version__ = 'unknown' + + +# Timeout monitor. We add two channels to the engine +# to which cherrypy.Application will publish. 
+engine.listeners['before_request'] = set() +engine.listeners['after_request'] = set() + + +class _TimeoutMonitor(process.plugins.Monitor): + + def __init__(self, bus): + self.servings = [] + process.plugins.Monitor.__init__(self, bus, self.run) + + def before_request(self): + self.servings.append((serving.request, serving.response)) + + def after_request(self): + try: + self.servings.remove((serving.request, serving.response)) + except ValueError: + pass + + def run(self): + """Check timeout on all responses. (Internal)""" + for req, resp in self.servings: + resp.check_timeout() +engine.timeout_monitor = _TimeoutMonitor(engine) +engine.timeout_monitor.subscribe() + +engine.autoreload = process.plugins.Autoreloader(engine) +engine.autoreload.subscribe() + +engine.thread_manager = process.plugins.ThreadManager(engine) +engine.thread_manager.subscribe() + +engine.signal_handler = process.plugins.SignalHandler(engine) + + +class _HandleSignalsPlugin(object): + + """Handle signals from other processes based on the configured + platform handlers above.""" + + def __init__(self, bus): + self.bus = bus + + def subscribe(self): + """Add the handlers based on the platform""" + if hasattr(self.bus, "signal_handler"): + self.bus.signal_handler.subscribe() + if hasattr(self.bus, "console_control_handler"): + self.bus.console_control_handler.subscribe() + +engine.signals = _HandleSignalsPlugin(engine) + + +from cherrypy import _cpserver +server = _cpserver.Server() +server.subscribe() + + +def quickstart(root=None, script_name="", config=None): + """Mount the given root, start the builtin server (and engine), then block. + + root: an instance of a "controller class" (a collection of page handler + methods) which represents the root of the application. + script_name: a string containing the "mount point" of the application. + This should start with a slash, and be the path portion of the URL + at which to mount the given root. 
For example, if root.index() will + handle requests to "http://www.example.com:8080/dept/app1/", then + the script_name argument would be "/dept/app1". + + It MUST NOT end in a slash. If the script_name refers to the root + of the URI, it MUST be an empty string (not "/"). + config: a file or dict containing application config. If this contains + a [global] section, those entries will be used in the global + (site-wide) config. + """ + if config: + _global_conf_alias.update(config) + + tree.mount(root, script_name, config) + + engine.signals.subscribe() + engine.start() + engine.block() + + +from cherrypy._cpcompat import threadlocal as _local + + +class _Serving(_local): + + """An interface for registering request and response objects. + + Rather than have a separate "thread local" object for the request and + the response, this class works as a single threadlocal container for + both objects (and any others which developers wish to define). In this + way, we can easily dump those objects when we stop/start a new HTTP + conversation, yet still refer to them as module-level globals in a + thread-safe way. + """ + + request = _cprequest.Request(_httputil.Host("127.0.0.1", 80), + _httputil.Host("127.0.0.1", 1111)) + """ + The request object for the current thread. In the main thread, + and any threads which are not receiving HTTP requests, this is None.""" + + response = _cprequest.Response() + """ + The response object for the current thread. 
In the main thread, + and any threads which are not receiving HTTP requests, this is None.""" + + def load(self, request, response): + self.request = request + self.response = response + + def clear(self): + """Remove all attributes of self.""" + self.__dict__.clear() + +serving = _Serving() + + +class _ThreadLocalProxy(object): + + __slots__ = ['__attrname__', '__dict__'] + + def __init__(self, attrname): + self.__attrname__ = attrname + + def __getattr__(self, name): + child = getattr(serving, self.__attrname__) + return getattr(child, name) + + def __setattr__(self, name, value): + if name in ("__attrname__", ): + object.__setattr__(self, name, value) + else: + child = getattr(serving, self.__attrname__) + setattr(child, name, value) + + def __delattr__(self, name): + child = getattr(serving, self.__attrname__) + delattr(child, name) + + def _get_dict(self): + child = getattr(serving, self.__attrname__) + d = child.__class__.__dict__.copy() + d.update(child.__dict__) + return d + __dict__ = property(_get_dict) + + def __getitem__(self, key): + child = getattr(serving, self.__attrname__) + return child[key] + + def __setitem__(self, key, value): + child = getattr(serving, self.__attrname__) + child[key] = value + + def __delitem__(self, key): + child = getattr(serving, self.__attrname__) + del child[key] + + def __contains__(self, key): + child = getattr(serving, self.__attrname__) + return key in child + + def __len__(self): + child = getattr(serving, self.__attrname__) + return len(child) + + def __nonzero__(self): + child = getattr(serving, self.__attrname__) + return bool(child) + # Python 3 + __bool__ = __nonzero__ + +# Create request and response object (the same objects will be used +# throughout the entire life of the webserver, but will redirect +# to the "serving" object) +request = _ThreadLocalProxy('request') +response = _ThreadLocalProxy('response') + +# Create thread_data object as a thread-specific all-purpose storage + + +class 
_ThreadData(_local): + + """A container for thread-specific data.""" +thread_data = _ThreadData() + + +# Monkeypatch pydoc to allow help() to go through the threadlocal proxy. +# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve. +# The only other way would be to change what is returned from type(request) +# and that's not possible in pure Python (you'd have to fake ob_type). +def _cherrypy_pydoc_resolve(thing, forceload=0): + """Given an object or a path to an object, get the object and its name.""" + if isinstance(thing, _ThreadLocalProxy): + thing = getattr(serving, thing.__attrname__) + return _pydoc._builtin_resolve(thing, forceload) + +try: + import pydoc as _pydoc + _pydoc._builtin_resolve = _pydoc.resolve + _pydoc.resolve = _cherrypy_pydoc_resolve +except ImportError: + pass + + +from cherrypy import _cplogging + + +class _GlobalLogManager(_cplogging.LogManager): + + """A site-wide LogManager; routes to app.log or global log as appropriate. + + This :class:`LogManager` implements + cherrypy.log() and cherrypy.log.access(). If either + function is called during a request, the message will be sent to the + logger for the current Application. If they are called outside of a + request, the message will be sent to the site-wide logger. + """ + + def __call__(self, *args, **kwargs): + """Log the given message to the app.log or global log as appropriate. + """ + # Do NOT use try/except here. See + # https://github.com/cherrypy/cherrypy/issues/945 + if hasattr(request, 'app') and hasattr(request.app, 'log'): + log = request.app.log + else: + log = self + return log.error(*args, **kwargs) + + def access(self): + """Log an access message to the app.log or global log as appropriate. + """ + try: + return request.app.log.access() + except AttributeError: + return _cplogging.LogManager.access(self) + + +log = _GlobalLogManager() +# Set a default screen handler on the global log. 
+log.screen = True +log.error_file = '' +# Using an access file makes CP about 10% slower. Leave off by default. +log.access_file = '' + + +def _buslog(msg, level): + log.error(msg, 'ENGINE', severity=level) +engine.subscribe('log', _buslog) + +from cherrypy._helper import expose, popargs, url # noqa + +# import _cpconfig last so it can reference other top-level objects +from cherrypy import _cpconfig +# Use _global_conf_alias so quickstart can use 'config' as an arg +# without shadowing cherrypy.config. +config = _global_conf_alias = _cpconfig.Config() +config.defaults = { + 'tools.log_tracebacks.on': True, + 'tools.log_headers.on': True, + 'tools.trailing_slash.on': True, + 'tools.encode.on': True +} +config.namespaces["log"] = lambda k, v: setattr(log, k, v) +config.namespaces["checker"] = lambda k, v: setattr(checker, k, v) +# Must reset to get our defaults applied. +config.reset() + +from cherrypy import _cpchecker +checker = _cpchecker.Checker() +engine.subscribe('start', checker) diff --git a/deps/cherrypy/__main__.py b/deps/cherrypy/__main__.py new file mode 100644 index 00000000..b1c9c012 --- /dev/null +++ b/deps/cherrypy/__main__.py @@ -0,0 +1,4 @@ +import cherrypy.daemon + +if __name__ == '__main__': + cherrypy.daemon.run() diff --git a/deps/cherrypy/__pycache__/__init__.cpython-34.pyc b/deps/cherrypy/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..c5890d1a Binary files /dev/null and b/deps/cherrypy/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/__main__.cpython-34.pyc b/deps/cherrypy/__pycache__/__main__.cpython-34.pyc new file mode 100644 index 00000000..a4414c7c Binary files /dev/null and b/deps/cherrypy/__pycache__/__main__.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpchecker.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpchecker.cpython-34.pyc new file mode 100644 index 00000000..ff7bea23 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpchecker.cpython-34.pyc 
differ diff --git a/deps/cherrypy/__pycache__/_cpcompat.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpcompat.cpython-34.pyc new file mode 100644 index 00000000..ec7a3cdd Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpcompat.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpconfig.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpconfig.cpython-34.pyc new file mode 100644 index 00000000..00f3417e Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpconfig.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpdispatch.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpdispatch.cpython-34.pyc new file mode 100644 index 00000000..e51b0fd0 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpdispatch.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cperror.cpython-34.pyc b/deps/cherrypy/__pycache__/_cperror.cpython-34.pyc new file mode 100644 index 00000000..6d589b2c Binary files /dev/null and b/deps/cherrypy/__pycache__/_cperror.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cplogging.cpython-34.pyc b/deps/cherrypy/__pycache__/_cplogging.cpython-34.pyc new file mode 100644 index 00000000..fd0e91a0 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cplogging.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpmodpy.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpmodpy.cpython-34.pyc new file mode 100644 index 00000000..c2c4b403 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpmodpy.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpnative_server.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpnative_server.cpython-34.pyc new file mode 100644 index 00000000..21e02f9a Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpnative_server.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpreqbody.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpreqbody.cpython-34.pyc new file mode 100644 index 00000000..f56f101b Binary files /dev/null and 
b/deps/cherrypy/__pycache__/_cpreqbody.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cprequest.cpython-34.pyc b/deps/cherrypy/__pycache__/_cprequest.cpython-34.pyc new file mode 100644 index 00000000..acca5403 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cprequest.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpserver.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpserver.cpython-34.pyc new file mode 100644 index 00000000..53e51111 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpserver.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpthreadinglocal.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpthreadinglocal.cpython-34.pyc new file mode 100644 index 00000000..0f374695 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpthreadinglocal.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cptools.cpython-34.pyc b/deps/cherrypy/__pycache__/_cptools.cpython-34.pyc new file mode 100644 index 00000000..800575c9 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cptools.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cptree.cpython-34.pyc b/deps/cherrypy/__pycache__/_cptree.cpython-34.pyc new file mode 100644 index 00000000..12edd5c0 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cptree.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpwsgi.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpwsgi.cpython-34.pyc new file mode 100644 index 00000000..a434575f Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpwsgi.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_cpwsgi_server.cpython-34.pyc b/deps/cherrypy/__pycache__/_cpwsgi_server.cpython-34.pyc new file mode 100644 index 00000000..331f44a9 Binary files /dev/null and b/deps/cherrypy/__pycache__/_cpwsgi_server.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/_helper.cpython-34.pyc b/deps/cherrypy/__pycache__/_helper.cpython-34.pyc new file mode 100644 index 00000000..a6e3efbb 
Binary files /dev/null and b/deps/cherrypy/__pycache__/_helper.cpython-34.pyc differ diff --git a/deps/cherrypy/__pycache__/daemon.cpython-34.pyc b/deps/cherrypy/__pycache__/daemon.cpython-34.pyc new file mode 100644 index 00000000..8a3e23b6 Binary files /dev/null and b/deps/cherrypy/__pycache__/daemon.cpython-34.pyc differ diff --git a/deps/cherrypy/_cpchecker.py b/deps/cherrypy/_cpchecker.py new file mode 100644 index 00000000..4ef82597 --- /dev/null +++ b/deps/cherrypy/_cpchecker.py @@ -0,0 +1,332 @@ +import os +import warnings + +import cherrypy +from cherrypy._cpcompat import iteritems, copykeys, builtins + + +class Checker(object): + + """A checker for CherryPy sites and their mounted applications. + + When this object is called at engine startup, it executes each + of its own methods whose names start with ``check_``. If you wish + to disable selected checks, simply add a line in your global + config which sets the appropriate method to False:: + + [global] + checker.check_skipped_app_config = False + + You may also dynamically add or replace ``check_*`` methods in this way. + """ + + on = True + """If True (the default), run all checks; if False, turn off all checks.""" + + def __init__(self): + self._populate_known_types() + + def __call__(self): + """Run all check_* methods.""" + if self.on: + oldformatwarning = warnings.formatwarning + warnings.formatwarning = self.formatwarning + try: + for name in dir(self): + if name.startswith("check_"): + method = getattr(self, name) + if method and hasattr(method, '__call__'): + method() + finally: + warnings.formatwarning = oldformatwarning + + def formatwarning(self, message, category, filename, lineno, line=None): + """Function to format a warning.""" + return "CherryPy Checker:\n%s\n\n" % message + + # This value should be set inside _cpconfig. 
+ global_config_contained_paths = False + + def check_app_config_entries_dont_start_with_script_name(self): + """Check for Application config with sections that repeat script_name. + """ + for sn, app in cherrypy.tree.apps.items(): + if not isinstance(app, cherrypy.Application): + continue + if not app.config: + continue + if sn == '': + continue + sn_atoms = sn.strip("/").split("/") + for key in app.config.keys(): + key_atoms = key.strip("/").split("/") + if key_atoms[:len(sn_atoms)] == sn_atoms: + warnings.warn( + "The application mounted at %r has config " + "entries that start with its script name: %r" % (sn, + key)) + + def check_site_config_entries_in_app_config(self): + """Check for mounted Applications that have site-scoped config.""" + for sn, app in iteritems(cherrypy.tree.apps): + if not isinstance(app, cherrypy.Application): + continue + + msg = [] + for section, entries in iteritems(app.config): + if section.startswith('/'): + for key, value in iteritems(entries): + for n in ("engine.", "server.", "tree.", "checker."): + if key.startswith(n): + msg.append("[%s] %s = %s" % + (section, key, value)) + if msg: + msg.insert(0, + "The application mounted at %r contains the " + "following config entries, which are only allowed " + "in site-wide config. Move them to a [global] " + "section and pass them to cherrypy.config.update() " + "instead of tree.mount()." % sn) + warnings.warn(os.linesep.join(msg)) + + def check_skipped_app_config(self): + """Check for mounted Applications that have no config.""" + for sn, app in cherrypy.tree.apps.items(): + if not isinstance(app, cherrypy.Application): + continue + if not app.config: + msg = "The Application mounted at %r has an empty config." % sn + if self.global_config_contained_paths: + msg += (" It looks like the config you passed to " + "cherrypy.config.update() contains application-" + "specific sections. 
You must explicitly pass " + "application config via " + "cherrypy.tree.mount(..., config=app_config)") + warnings.warn(msg) + return + + def check_app_config_brackets(self): + """Check for Application config with extraneous brackets in section + names. + """ + for sn, app in cherrypy.tree.apps.items(): + if not isinstance(app, cherrypy.Application): + continue + if not app.config: + continue + for key in app.config.keys(): + if key.startswith("[") or key.endswith("]"): + warnings.warn( + "The application mounted at %r has config " + "section names with extraneous brackets: %r. " + "Config *files* need brackets; config *dicts* " + "(e.g. passed to tree.mount) do not." % (sn, key)) + + def check_static_paths(self): + """Check Application config for incorrect static paths.""" + # Use the dummy Request object in the main thread. + request = cherrypy.request + for sn, app in cherrypy.tree.apps.items(): + if not isinstance(app, cherrypy.Application): + continue + request.app = app + for section in app.config: + # get_resource will populate request.config + request.get_resource(section + "/dummy.html") + conf = request.config.get + + if conf("tools.staticdir.on", False): + msg = "" + root = conf("tools.staticdir.root") + dir = conf("tools.staticdir.dir") + if dir is None: + msg = "tools.staticdir.dir is not set." + else: + fulldir = "" + if os.path.isabs(dir): + fulldir = dir + if root: + msg = ("dir is an absolute path, even " + "though a root is provided.") + testdir = os.path.join(root, dir[1:]) + if os.path.exists(testdir): + msg += ( + "\nIf you meant to serve the " + "filesystem folder at %r, remove the " + "leading slash from dir." % (testdir,)) + else: + if not root: + msg = ( + "dir is a relative path and " + "no root provided.") + else: + fulldir = os.path.join(root, dir) + if not os.path.isabs(fulldir): + msg = ("%r is not an absolute path." 
% ( + fulldir,)) + + if fulldir and not os.path.exists(fulldir): + if msg: + msg += "\n" + msg += ("%r (root + dir) is not an existing " + "filesystem path." % fulldir) + + if msg: + warnings.warn("%s\nsection: [%s]\nroot: %r\ndir: %r" + % (msg, section, root, dir)) + + # -------------------------- Compatibility -------------------------- # + obsolete = { + 'server.default_content_type': 'tools.response_headers.headers', + 'log_access_file': 'log.access_file', + 'log_config_options': None, + 'log_file': 'log.error_file', + 'log_file_not_found': None, + 'log_request_headers': 'tools.log_headers.on', + 'log_to_screen': 'log.screen', + 'show_tracebacks': 'request.show_tracebacks', + 'throw_errors': 'request.throw_errors', + 'profiler.on': ('cherrypy.tree.mount(profiler.make_app(' + 'cherrypy.Application(Root())))'), + } + + deprecated = {} + + def _compat(self, config): + """Process config and warn on each obsolete or deprecated entry.""" + for section, conf in config.items(): + if isinstance(conf, dict): + for k, v in conf.items(): + if k in self.obsolete: + warnings.warn("%r is obsolete. Use %r instead.\n" + "section: [%s]" % + (k, self.obsolete[k], section)) + elif k in self.deprecated: + warnings.warn("%r is deprecated. Use %r instead.\n" + "section: [%s]" % + (k, self.deprecated[k], section)) + else: + if section in self.obsolete: + warnings.warn("%r is obsolete. Use %r instead." + % (section, self.obsolete[section])) + elif section in self.deprecated: + warnings.warn("%r is deprecated. Use %r instead." 
+ % (section, self.deprecated[section])) + + def check_compatibility(self): + """Process config and warn on each obsolete or deprecated entry.""" + self._compat(cherrypy.config) + for sn, app in cherrypy.tree.apps.items(): + if not isinstance(app, cherrypy.Application): + continue + self._compat(app.config) + + # ------------------------ Known Namespaces ------------------------ # + extra_config_namespaces = [] + + def _known_ns(self, app): + ns = ["wsgi"] + ns.extend(copykeys(app.toolboxes)) + ns.extend(copykeys(app.namespaces)) + ns.extend(copykeys(app.request_class.namespaces)) + ns.extend(copykeys(cherrypy.config.namespaces)) + ns += self.extra_config_namespaces + + for section, conf in app.config.items(): + is_path_section = section.startswith("/") + if is_path_section and isinstance(conf, dict): + for k, v in conf.items(): + atoms = k.split(".") + if len(atoms) > 1: + if atoms[0] not in ns: + # Spit out a special warning if a known + # namespace is preceded by "cherrypy." + if atoms[0] == "cherrypy" and atoms[1] in ns: + msg = ( + "The config entry %r is invalid; " + "try %r instead.\nsection: [%s]" + % (k, ".".join(atoms[1:]), section)) + else: + msg = ( + "The config entry %r is invalid, " + "because the %r config namespace " + "is unknown.\n" + "section: [%s]" % (k, atoms[0], section)) + warnings.warn(msg) + elif atoms[0] == "tools": + if atoms[1] not in dir(cherrypy.tools): + msg = ( + "The config entry %r may be invalid, " + "because the %r tool was not found.\n" + "section: [%s]" % (k, atoms[1], section)) + warnings.warn(msg) + + def check_config_namespaces(self): + """Process config and warn on each unknown config namespace.""" + for sn, app in cherrypy.tree.apps.items(): + if not isinstance(app, cherrypy.Application): + continue + self._known_ns(app) + + # -------------------------- Config Types -------------------------- # + known_config_types = {} + + def _populate_known_types(self): + b = [x for x in vars(builtins).values() + if type(x) is 
type(str)] + + def traverse(obj, namespace): + for name in dir(obj): + # Hack for 3.2's warning about body_params + if name == 'body_params': + continue + vtype = type(getattr(obj, name, None)) + if vtype in b: + self.known_config_types[namespace + "." + name] = vtype + + traverse(cherrypy.request, "request") + traverse(cherrypy.response, "response") + traverse(cherrypy.server, "server") + traverse(cherrypy.engine, "engine") + traverse(cherrypy.log, "log") + + def _known_types(self, config): + msg = ("The config entry %r in section %r is of type %r, " + "which does not match the expected type %r.") + + for section, conf in config.items(): + if isinstance(conf, dict): + for k, v in conf.items(): + if v is not None: + expected_type = self.known_config_types.get(k, None) + vtype = type(v) + if expected_type and vtype != expected_type: + warnings.warn(msg % (k, section, vtype.__name__, + expected_type.__name__)) + else: + k, v = section, conf + if v is not None: + expected_type = self.known_config_types.get(k, None) + vtype = type(v) + if expected_type and vtype != expected_type: + warnings.warn(msg % (k, section, vtype.__name__, + expected_type.__name__)) + + def check_config_types(self): + """Assert that config values are of the same type as default values.""" + self._known_types(cherrypy.config) + for sn, app in cherrypy.tree.apps.items(): + if not isinstance(app, cherrypy.Application): + continue + self._known_types(app.config) + + # -------------------- Specific config warnings -------------------- # + def check_localhost(self): + """Warn if any socket_host is 'localhost'. See #711.""" + for k, v in cherrypy.config.items(): + if k == 'server.socket_host' and v == 'localhost': + warnings.warn("The use of 'localhost' as a socket host can " + "cause problems on newer systems, since " + "'localhost' can map to either an IPv4 or an " + "IPv6 address. 
You should use '127.0.0.1' " + "or '[::1]' instead.") diff --git a/deps/cherrypy/_cpcompat.py b/deps/cherrypy/_cpcompat.py new file mode 100644 index 00000000..9fa6c4e8 --- /dev/null +++ b/deps/cherrypy/_cpcompat.py @@ -0,0 +1,330 @@ +"""Compatibility code for using CherryPy with various versions of Python. + +CherryPy 3.2 is compatible with Python versions 2.6+. This module provides a +useful abstraction over the differences between Python versions, sometimes by +preferring a newer idiom, sometimes an older one, and sometimes a custom one. + +In particular, Python 2 uses str and '' for byte strings, while Python 3 +uses str and '' for unicode strings. We will call each of these the 'native +string' type for each version. Because of this major difference, this module +provides +two functions: 'ntob', which translates native strings (of type 'str') into +byte strings regardless of Python version, and 'ntou', which translates native +strings to unicode strings. This also provides a 'BytesIO' name for dealing +specifically with bytes, and a 'StringIO' name for dealing with native strings. +It also provides a 'base64_decode' function with native strings as input and +output. +""" +import os +import re +import sys +import threading + +import six + +if six.PY3: + def ntob(n, encoding='ISO-8859-1'): + """Return the given native string as a byte string in the given + encoding. + """ + assert_native(n) + # In Python 3, the native string type is unicode + return n.encode(encoding) + + def ntou(n, encoding='ISO-8859-1'): + """Return the given native string as a unicode string with the given + encoding. 
+ """ + assert_native(n) + # In Python 3, the native string type is unicode + return n + + def tonative(n, encoding='ISO-8859-1'): + """Return the given string as a native string in the given encoding.""" + # In Python 3, the native string type is unicode + if isinstance(n, bytes): + return n.decode(encoding) + return n +else: + # Python 2 + def ntob(n, encoding='ISO-8859-1'): + """Return the given native string as a byte string in the given + encoding. + """ + assert_native(n) + # In Python 2, the native string type is bytes. Assume it's already + # in the given encoding, which for ISO-8859-1 is almost always what + # was intended. + return n + + def ntou(n, encoding='ISO-8859-1'): + """Return the given native string as a unicode string with the given + encoding. + """ + assert_native(n) + # In Python 2, the native string type is bytes. + # First, check for the special encoding 'escape'. The test suite uses + # this to signal that it wants to pass a string with embedded \uXXXX + # escapes, but without having to prefix it with u'' for Python 2, + # but no prefix for Python 3. + if encoding == 'escape': + return unicode( + re.sub(r'\\u([0-9a-zA-Z]{4})', + lambda m: unichr(int(m.group(1), 16)), + n.decode('ISO-8859-1'))) + # Assume it's already in the given encoding, which for ISO-8859-1 + # is almost always what was intended. + return n.decode(encoding) + + def tonative(n, encoding='ISO-8859-1'): + """Return the given string as a native string in the given encoding.""" + # In Python 2, the native string type is bytes. 
+ if isinstance(n, unicode): + return n.encode(encoding) + return n + + +def assert_native(n): + if not isinstance(n, str): + raise TypeError("n must be a native str (got %s)" % type(n).__name__) + +try: + # Python 3.1+ + from base64 import decodebytes as _base64_decodebytes +except ImportError: + # Python 3.0- + # since CherryPy claims compability with Python 2.3, we must use + # the legacy API of base64 + from base64 import decodestring as _base64_decodebytes + + +def base64_decode(n, encoding='ISO-8859-1'): + """Return the native string base64-decoded (as a native string).""" + if isinstance(n, six.text_type): + b = n.encode(encoding) + else: + b = n + b = _base64_decodebytes(b) + if str is six.text_type: + return b.decode(encoding) + else: + return b + + +try: + sorted = sorted +except NameError: + def sorted(i): + i = i[:] + i.sort() + return i + +try: + reversed = reversed +except NameError: + def reversed(x): + i = len(x) + while i > 0: + i -= 1 + yield x[i] + +try: + # Python 3 + from urllib.parse import urljoin, urlencode + from urllib.parse import quote, quote_plus + from urllib.request import unquote, urlopen + from urllib.request import parse_http_list, parse_keqv_list +except ImportError: + # Python 2 + from urlparse import urljoin # noqa + from urllib import urlencode, urlopen # noqa + from urllib import quote, quote_plus # noqa + from urllib import unquote # noqa + from urllib2 import parse_http_list, parse_keqv_list # noqa + +try: + from threading import local as threadlocal +except ImportError: + from cherrypy._cpthreadinglocal import local as threadlocal # noqa + +try: + dict.iteritems + # Python 2 + iteritems = lambda d: d.iteritems() + copyitems = lambda d: d.items() +except AttributeError: + # Python 3 + iteritems = lambda d: d.items() + copyitems = lambda d: list(d.items()) + +try: + dict.iterkeys + # Python 2 + iterkeys = lambda d: d.iterkeys() + copykeys = lambda d: d.keys() +except AttributeError: + # Python 3 + iterkeys = lambda d: 
d.keys() + copykeys = lambda d: list(d.keys()) + +try: + dict.itervalues + # Python 2 + itervalues = lambda d: d.itervalues() + copyvalues = lambda d: d.values() +except AttributeError: + # Python 3 + itervalues = lambda d: d.values() + copyvalues = lambda d: list(d.values()) + +try: + # Python 3 + import builtins +except ImportError: + # Python 2 + import __builtin__ as builtins # noqa + +try: + # Python 2. We try Python 2 first clients on Python 2 + # don't try to import the 'http' module from cherrypy.lib + from Cookie import SimpleCookie, CookieError + from httplib import BadStatusLine, HTTPConnection, IncompleteRead + from httplib import NotConnected + from BaseHTTPServer import BaseHTTPRequestHandler +except ImportError: + # Python 3 + from http.cookies import SimpleCookie, CookieError # noqa + from http.client import BadStatusLine, HTTPConnection, IncompleteRead # noqa + from http.client import NotConnected # noqa + from http.server import BaseHTTPRequestHandler # noqa + +# Some platforms don't expose HTTPSConnection, so handle it separately +if six.PY3: + try: + from http.client import HTTPSConnection + except ImportError: + # Some platforms which don't have SSL don't expose HTTPSConnection + HTTPSConnection = None +else: + try: + from httplib import HTTPSConnection + except ImportError: + HTTPSConnection = None + +try: + # Python 2 + xrange = xrange +except NameError: + # Python 3 + xrange = range + +import threading +if hasattr(threading.Thread, "daemon"): + # Python 2.6+ + def get_daemon(t): + return t.daemon + + def set_daemon(t, val): + t.daemon = val +else: + def get_daemon(t): + return t.isDaemon() + + def set_daemon(t, val): + t.setDaemon(val) + +try: + # Python 3 + from urllib.parse import unquote as parse_unquote + + def unquote_qs(atom, encoding, errors='strict'): + return parse_unquote( + atom.replace('+', ' '), + encoding=encoding, + errors=errors) +except ImportError: + # Python 2 + from urllib import unquote as parse_unquote + + def 
unquote_qs(atom, encoding, errors='strict'): + return parse_unquote(atom.replace('+', ' ')).decode(encoding, errors) + +try: + # Prefer simplejson, which is usually more advanced than the builtin + # module. + import simplejson as json + json_decode = json.JSONDecoder().decode + _json_encode = json.JSONEncoder().iterencode +except ImportError: + if sys.version_info >= (2, 6): + # Python >=2.6 : json is part of the standard library + import json + json_decode = json.JSONDecoder().decode + _json_encode = json.JSONEncoder().iterencode + else: + json = None + + def json_decode(s): + raise ValueError('No JSON library is available') + + def _json_encode(s): + raise ValueError('No JSON library is available') +finally: + if json and six.PY3: + # The two Python 3 implementations (simplejson/json) + # outputs str. We need bytes. + def json_encode(value): + for chunk in _json_encode(value): + yield chunk.encode('utf8') + else: + json_encode = _json_encode + +text_or_bytes = six.text_type, six.binary_type + +try: + import cPickle as pickle +except ImportError: + # In Python 2, pickle is a Python version. + # In Python 3, pickle is the sped-up C version. + import pickle # noqa + +import binascii + +def random20(): + return binascii.hexlify(os.urandom(20)).decode('ascii') + +try: + from _thread import get_ident as get_thread_ident +except ImportError: + from thread import get_ident as get_thread_ident # noqa + +try: + # Python 3 + next = next +except NameError: + # Python 2 + def next(i): + return i.next() + +if sys.version_info >= (3, 3): + Timer = threading.Timer + Event = threading.Event +else: + # Python 3.2 and earlier + Timer = threading._Timer + Event = threading._Event + +# Prior to Python 2.6, the Thread class did not have a .daemon property. +# This mix-in adds that property. 
+ + +class SetDaemonProperty: + + def __get_daemon(self): + return self.isDaemon() + + def __set_daemon(self, daemon): + self.setDaemon(daemon) + + if sys.version_info < (2, 6): + daemon = property(__get_daemon, __set_daemon) diff --git a/deps/cherrypy/_cpconfig.py b/deps/cherrypy/_cpconfig.py new file mode 100644 index 00000000..7afb9131 --- /dev/null +++ b/deps/cherrypy/_cpconfig.py @@ -0,0 +1,303 @@ +""" +Configuration system for CherryPy. + +Configuration in CherryPy is implemented via dictionaries. Keys are strings +which name the mapped value, which may be of any type. + + +Architecture +------------ + +CherryPy Requests are part of an Application, which runs in a global context, +and configuration data may apply to any of those three scopes: + +Global + Configuration entries which apply everywhere are stored in + cherrypy.config. + +Application + Entries which apply to each mounted application are stored + on the Application object itself, as 'app.config'. This is a two-level + dict where each key is a path, or "relative URL" (for example, "/" or + "/path/to/my/page"), and each value is a config dict. Usually, this + data is provided in the call to tree.mount(root(), config=conf), + although you may also use app.merge(conf). + +Request + Each Request object possesses a single 'Request.config' dict. + Early in the request process, this dict is populated by merging global + config entries, Application entries (whose path equals or is a parent + of Request.path_info), and any config acquired while looking up the + page handler (see next). + + +Declaration +----------- + +Configuration data may be supplied as a Python dictionary, as a filename, +or as an open file object. When you supply a filename or file, CherryPy +uses Python's builtin ConfigParser; you declare Application config by +writing each path as a section header:: + + [/path/to/my/page] + request.stream = True + +To declare global configuration entries, place them in a [global] section. 
+ +You may also declare config entries directly on the classes and methods +(page handlers) that make up your CherryPy application via the ``_cp_config`` +attribute, set with the ``cherrypy.config`` decorator. For example:: + + @cherrypy.config(**{'tools.gzip.on': True}) + class Demo: + + @cherrypy.expose + @cherrypy.config(**{'request.show_tracebacks': False}) + def index(self): + return "Hello world" + +.. note:: + + This behavior is only guaranteed for the default dispatcher. + Other dispatchers may have different restrictions on where + you can attach config attributes. + + +Namespaces +---------- + +Configuration keys are separated into namespaces by the first "." in the key. +Current namespaces: + +engine + Controls the 'application engine', including autoreload. + These can only be declared in the global config. + +tree + Grafts cherrypy.Application objects onto cherrypy.tree. + These can only be declared in the global config. + +hooks + Declares additional request-processing functions. + +log + Configures the logging for each application. + These can only be declared in the global or / config. + +request + Adds attributes to each Request. + +response + Adds attributes to each Response. + +server + Controls the default HTTP server via cherrypy.server. + These can only be declared in the global config. + +tools + Runs and configures additional request-processing packages. + +wsgi + Adds WSGI middleware to an Application's "pipeline". + These can only be declared in the app's root config ("/"). + +checker + Controls the 'checker', which looks for common errors in + app state (including config) when the engine starts. + Global config only. + +The only key that does not exist in a namespace is the "environment" entry. +This special entry 'imports' other config entries from a template stored in +cherrypy._cpconfig.environments[environment]. It only applies to the global +config, and only when you use cherrypy.config.update. 
+ +You can define your own namespaces to be called at the Global, Application, +or Request level, by adding a named handler to cherrypy.config.namespaces, +app.namespaces, or app.request_class.namespaces. The name can +be any string, and the handler must be either a callable or a (Python 2.5 +style) context manager. +""" + +import cherrypy +from cherrypy._cpcompat import text_or_bytes +from cherrypy.lib import reprconf + +# Deprecated in CherryPy 3.2--remove in 3.3 +NamespaceSet = reprconf.NamespaceSet + + +def merge(base, other): + """Merge one app config (from a dict, file, or filename) into another. + + If the given config is a filename, it will be appended to + the list of files to monitor for "autoreload" changes. + """ + if isinstance(other, text_or_bytes): + cherrypy.engine.autoreload.files.add(other) + + # Load other into base + for section, value_map in reprconf.as_dict(other).items(): + if not isinstance(value_map, dict): + raise ValueError( + "Application config must include section headers, but the " + "config you tried to merge doesn't have any sections. 
" + "Wrap your config in another dict with paths as section " + "headers, for example: {'/': config}.") + base.setdefault(section, {}).update(value_map) + + +class Config(reprconf.Config): + + """The 'global' configuration data for the entire CherryPy process.""" + + def update(self, config): + """Update self from a dict, file or filename.""" + if isinstance(config, text_or_bytes): + # Filename + cherrypy.engine.autoreload.files.add(config) + reprconf.Config.update(self, config) + + def _apply(self, config): + """Update self from a dict.""" + if isinstance(config.get("global"), dict): + if len(config) > 1: + cherrypy.checker.global_config_contained_paths = True + config = config["global"] + if 'tools.staticdir.dir' in config: + config['tools.staticdir.section'] = "global" + reprconf.Config._apply(self, config) + + @staticmethod + def __call__(*args, **kwargs): + """Decorator for page handlers to set _cp_config.""" + if args: + raise TypeError( + "The cherrypy.config decorator does not accept positional " + "arguments; you must use keyword arguments.") + + def tool_decorator(f): + _Vars(f).setdefault('_cp_config', {}).update(kwargs) + return f + return tool_decorator + + +class _Vars(object): + """ + Adapter that allows setting a default attribute on a function + or class. 
+ """ + def __init__(self, target): + self.target = target + + def setdefault(self, key, default): + if not hasattr(self.target, key): + setattr(self.target, key, default) + return getattr(self.target, key) + + +# Sphinx begin config.environments +Config.environments = environments = { + "staging": { + 'engine.autoreload.on': False, + 'checker.on': False, + 'tools.log_headers.on': False, + 'request.show_tracebacks': False, + 'request.show_mismatched_params': False, + }, + "production": { + 'engine.autoreload.on': False, + 'checker.on': False, + 'tools.log_headers.on': False, + 'request.show_tracebacks': False, + 'request.show_mismatched_params': False, + 'log.screen': False, + }, + "embedded": { + # For use with CherryPy embedded in another deployment stack. + 'engine.autoreload.on': False, + 'checker.on': False, + 'tools.log_headers.on': False, + 'request.show_tracebacks': False, + 'request.show_mismatched_params': False, + 'log.screen': False, + 'engine.SIGHUP': None, + 'engine.SIGTERM': None, + }, + "test_suite": { + 'engine.autoreload.on': False, + 'checker.on': False, + 'tools.log_headers.on': False, + 'request.show_tracebacks': True, + 'request.show_mismatched_params': True, + 'log.screen': False, + }, +} +# Sphinx end config.environments + + +def _server_namespace_handler(k, v): + """Config handler for the "server" namespace.""" + atoms = k.split(".", 1) + if len(atoms) > 1: + # Special-case config keys of the form 'server.servername.socket_port' + # to configure additional HTTP servers. + if not hasattr(cherrypy, "servers"): + cherrypy.servers = {} + + servername, k = atoms + if servername not in cherrypy.servers: + from cherrypy import _cpserver + cherrypy.servers[servername] = _cpserver.Server() + # On by default, but 'on = False' can unsubscribe it (see below). 
+ cherrypy.servers[servername].subscribe() + + if k == 'on': + if v: + cherrypy.servers[servername].subscribe() + else: + cherrypy.servers[servername].unsubscribe() + else: + setattr(cherrypy.servers[servername], k, v) + else: + setattr(cherrypy.server, k, v) +Config.namespaces["server"] = _server_namespace_handler + + +def _engine_namespace_handler(k, v): + """Config handler for the "engine" namespace.""" + engine = cherrypy.engine + + if k == 'SIGHUP': + engine.subscribe('SIGHUP', v) + elif k == 'SIGTERM': + engine.subscribe('SIGTERM', v) + elif "." in k: + plugin, attrname = k.split(".", 1) + plugin = getattr(engine, plugin) + if attrname == 'on': + if v and hasattr(getattr(plugin, 'subscribe', None), '__call__'): + plugin.subscribe() + return + elif ( + (not v) and + hasattr(getattr(plugin, 'unsubscribe', None), '__call__') + ): + plugin.unsubscribe() + return + setattr(plugin, attrname, v) + else: + setattr(engine, k, v) +Config.namespaces["engine"] = _engine_namespace_handler + + +def _tree_namespace_handler(k, v): + """Namespace handler for the 'tree' config namespace.""" + if isinstance(v, dict): + for script_name, app in v.items(): + cherrypy.tree.graft(app, script_name) + msg = "Mounted: %s on %s" % (app, script_name or "/") + cherrypy.engine.log(msg) + else: + cherrypy.tree.graft(v, v.script_name) + cherrypy.engine.log("Mounted: %s on %s" % (v, v.script_name or "/")) +Config.namespaces["tree"] = _tree_namespace_handler diff --git a/deps/cherrypy/_cpdispatch.py b/deps/cherrypy/_cpdispatch.py new file mode 100644 index 00000000..2cb03c7e --- /dev/null +++ b/deps/cherrypy/_cpdispatch.py @@ -0,0 +1,685 @@ +"""CherryPy dispatchers. + +A 'dispatcher' is the object which looks up the 'page handler' callable +and collects config for the current request based on the path_info, other +request attributes, and the application architecture. The core calls the +dispatcher as early as possible, passing it a 'path_info' argument. 
+ +The default dispatcher discovers the page handler by matching path_info +to a hierarchical arrangement of objects, starting at request.app.root. +""" + +import string +import sys +import types +try: + classtype = (type, types.ClassType) +except AttributeError: + classtype = type + +import cherrypy + + +class PageHandler(object): + + """Callable which sets response.body.""" + + def __init__(self, callable, *args, **kwargs): + self.callable = callable + self.args = args + self.kwargs = kwargs + + def get_args(self): + return cherrypy.serving.request.args + + def set_args(self, args): + cherrypy.serving.request.args = args + return cherrypy.serving.request.args + + args = property( + get_args, + set_args, + doc="The ordered args should be accessible from post dispatch hooks" + ) + + def get_kwargs(self): + return cherrypy.serving.request.kwargs + + def set_kwargs(self, kwargs): + cherrypy.serving.request.kwargs = kwargs + return cherrypy.serving.request.kwargs + + kwargs = property( + get_kwargs, + set_kwargs, + doc="The named kwargs should be accessible from post dispatch hooks" + ) + + def __call__(self): + try: + return self.callable(*self.args, **self.kwargs) + except TypeError: + x = sys.exc_info()[1] + try: + test_callable_spec(self.callable, self.args, self.kwargs) + except cherrypy.HTTPError: + raise sys.exc_info()[1] + except: + raise x + raise + + +def test_callable_spec(callable, callable_args, callable_kwargs): + """ + Inspect callable and test to see if the given args are suitable for it. + + When an error occurs during the handler's invoking stage there are 2 + erroneous cases: + 1. Too many parameters passed to a function which doesn't define + one of *args or **kwargs. + 2. Too little parameters are passed to the function. + + There are 3 sources of parameters to a cherrypy handler. + 1. query string parameters are passed as keyword parameters to the + handler. + 2. body parameters are also passed as keyword parameters. + 3. 
when partial matching occurs, the final path atoms are passed as + positional args. + Both the query string and path atoms are part of the URI. If they are + incorrect, then a 404 Not Found should be raised. Conversely the body + parameters are part of the request; if they are invalid a 400 Bad Request. + """ + show_mismatched_params = getattr( + cherrypy.serving.request, 'show_mismatched_params', False) + try: + (args, varargs, varkw, defaults) = getargspec(callable) + except TypeError: + if isinstance(callable, object) and hasattr(callable, '__call__'): + (args, varargs, varkw, + defaults) = getargspec(callable.__call__) + else: + # If it wasn't one of our own types, re-raise + # the original error + raise + + if args and args[0] == 'self': + args = args[1:] + + arg_usage = dict([(arg, 0,) for arg in args]) + vararg_usage = 0 + varkw_usage = 0 + extra_kwargs = set() + + for i, value in enumerate(callable_args): + try: + arg_usage[args[i]] += 1 + except IndexError: + vararg_usage += 1 + + for key in callable_kwargs.keys(): + try: + arg_usage[key] += 1 + except KeyError: + varkw_usage += 1 + extra_kwargs.add(key) + + # figure out which args have defaults. + args_with_defaults = args[-len(defaults or []):] + for i, val in enumerate(defaults or []): + # Defaults take effect only when the arg hasn't been used yet. + if arg_usage[args_with_defaults[i]] == 0: + arg_usage[args_with_defaults[i]] += 1 + + missing_args = [] + multiple_args = [] + for key, usage in arg_usage.items(): + if usage == 0: + missing_args.append(key) + elif usage > 1: + multiple_args.append(key) + + if missing_args: + # In the case where the method allows body arguments + # there are 3 potential errors: + # 1. not enough query string parameters -> 404 + # 2. not enough body parameters -> 400 + # 3. 
not enough path parts (partial matches) -> 404 + # + # We can't actually tell which case it is, + # so I'm raising a 404 because that covers 2/3 of the + # possibilities + # + # In the case where the method does not allow body + # arguments it's definitely a 404. + message = None + if show_mismatched_params: + message = "Missing parameters: %s" % ",".join(missing_args) + raise cherrypy.HTTPError(404, message=message) + + # the extra positional arguments come from the path - 404 Not Found + if not varargs and vararg_usage > 0: + raise cherrypy.HTTPError(404) + + body_params = cherrypy.serving.request.body.params or {} + body_params = set(body_params.keys()) + qs_params = set(callable_kwargs.keys()) - body_params + + if multiple_args: + if qs_params.intersection(set(multiple_args)): + # If any of the multiple parameters came from the query string then + # it's a 404 Not Found + error = 404 + else: + # Otherwise it's a 400 Bad Request + error = 400 + + message = None + if show_mismatched_params: + message = "Multiple values for parameters: "\ + "%s" % ",".join(multiple_args) + raise cherrypy.HTTPError(error, message=message) + + if not varkw and varkw_usage > 0: + + # If there were extra query string parameters, it's a 404 Not Found + extra_qs_params = set(qs_params).intersection(extra_kwargs) + if extra_qs_params: + message = None + if show_mismatched_params: + message = "Unexpected query string "\ + "parameters: %s" % ", ".join(extra_qs_params) + raise cherrypy.HTTPError(404, message=message) + + # If there were any extra body parameters, it's a 400 Not Found + extra_body_params = set(body_params).intersection(extra_kwargs) + if extra_body_params: + message = None + if show_mismatched_params: + message = "Unexpected body parameters: "\ + "%s" % ", ".join(extra_body_params) + raise cherrypy.HTTPError(400, message=message) + + +try: + import inspect +except ImportError: + test_callable_spec = lambda callable, args, kwargs: None +else: + getargspec = inspect.getargspec 
+ # Python 3 requires using getfullargspec if keyword-only arguments are present + if hasattr(inspect, 'getfullargspec'): + def getargspec(callable): + return inspect.getfullargspec(callable)[:4] + + +class LateParamPageHandler(PageHandler): + + """When passing cherrypy.request.params to the page handler, we do not + want to capture that dict too early; we want to give tools like the + decoding tool a chance to modify the params dict in-between the lookup + of the handler and the actual calling of the handler. This subclass + takes that into account, and allows request.params to be 'bound late' + (it's more complicated than that, but that's the effect). + """ + + def _get_kwargs(self): + kwargs = cherrypy.serving.request.params.copy() + if self._kwargs: + kwargs.update(self._kwargs) + return kwargs + + def _set_kwargs(self, kwargs): + cherrypy.serving.request.kwargs = kwargs + self._kwargs = kwargs + + kwargs = property(_get_kwargs, _set_kwargs, + doc='page handler kwargs (with ' + 'cherrypy.request.params copied in)') + + +if sys.version_info < (3, 0): + punctuation_to_underscores = string.maketrans( + string.punctuation, '_' * len(string.punctuation)) + + def validate_translator(t): + if not isinstance(t, str) or len(t) != 256: + raise ValueError( + "The translate argument must be a str of len 256.") +else: + punctuation_to_underscores = str.maketrans( + string.punctuation, '_' * len(string.punctuation)) + + def validate_translator(t): + if not isinstance(t, dict): + raise ValueError("The translate argument must be a dict.") + + +class Dispatcher(object): + + """CherryPy Dispatcher which walks a tree of objects to find a handler. + + The tree is rooted at cherrypy.request.app.root, and each hierarchical + component in the path_info argument is matched to a corresponding nested + attribute of the root object. Matching handlers must have an 'exposed' + attribute which evaluates to True. The special method name "index" + matches a URI which ends in a slash ("/"). 
The special method name + "default" may match a portion of the path_info (but only when no longer + substring of the path_info matches some other object). + + This is the default, built-in dispatcher for CherryPy. + """ + + dispatch_method_name = '_cp_dispatch' + """ + The name of the dispatch method that nodes may optionally implement + to provide their own dynamic dispatch algorithm. + """ + + def __init__(self, dispatch_method_name=None, + translate=punctuation_to_underscores): + validate_translator(translate) + self.translate = translate + if dispatch_method_name: + self.dispatch_method_name = dispatch_method_name + + def __call__(self, path_info): + """Set handler and config for the current request.""" + request = cherrypy.serving.request + func, vpath = self.find_handler(path_info) + + if func: + # Decode any leftover %2F in the virtual_path atoms. + vpath = [x.replace("%2F", "/") for x in vpath] + request.handler = LateParamPageHandler(func, *vpath) + else: + request.handler = cherrypy.NotFound() + + def find_handler(self, path): + """Return the appropriate page handler, plus any virtual path. + + This will return two objects. The first will be a callable, + which can be used to generate page output. Any parameters from + the query string or request body will be sent to that callable + as keyword arguments. + + The callable is found by traversing the application's tree, + starting from cherrypy.request.app.root, and matching path + components to successive objects in the tree. For example, the + URL "/path/to/handler" might return root.path.to.handler. + + The second object returned will be a list of names which are + 'virtual path' components: parts of the URL which are dynamic, + and were not used when looking up the handler. + These virtual path components are passed to the handler as + positional arguments. 
+ """ + request = cherrypy.serving.request + app = request.app + root = app.root + dispatch_name = self.dispatch_method_name + + # Get config for the root object/path. + fullpath = [x for x in path.strip('/').split('/') if x] + ['index'] + fullpath_len = len(fullpath) + segleft = fullpath_len + nodeconf = {} + if hasattr(root, "_cp_config"): + nodeconf.update(root._cp_config) + if "/" in app.config: + nodeconf.update(app.config["/"]) + object_trail = [['root', root, nodeconf, segleft]] + + node = root + iternames = fullpath[:] + while iternames: + name = iternames[0] + # map to legal Python identifiers (e.g. replace '.' with '_') + objname = name.translate(self.translate) + + nodeconf = {} + subnode = getattr(node, objname, None) + pre_len = len(iternames) + if subnode is None: + dispatch = getattr(node, dispatch_name, None) + if dispatch and hasattr(dispatch, '__call__') and not \ + getattr(dispatch, 'exposed', False) and \ + pre_len > 1: + # Don't expose the hidden 'index' token to _cp_dispatch + # We skip this if pre_len == 1 since it makes no sense + # to call a dispatcher when we have no tokens left. + index_name = iternames.pop() + subnode = dispatch(vpath=iternames) + iternames.append(index_name) + else: + # We didn't find a path, but keep processing in case there + # is a default() handler. + iternames.pop(0) + else: + # We found the path, remove the vpath entry + iternames.pop(0) + segleft = len(iternames) + if segleft > pre_len: + # No path segment was removed. Raise an error. + raise cherrypy.CherryPyException( + "A vpath segment was added. Custom dispatchers may only " + + "remove elements. While trying to process " + + "{0} in {1}".format(name, fullpath) + ) + elif segleft == pre_len: + # Assume that the handler used the current path segment, but + # did not pop it. This allows things like + # return getattr(self, vpath[0], None) + iternames.pop(0) + segleft -= 1 + node = subnode + + if node is not None: + # Get _cp_config attached to this node. 
+ if hasattr(node, "_cp_config"): + nodeconf.update(node._cp_config) + + # Mix in values from app.config for this path. + existing_len = fullpath_len - pre_len + if existing_len != 0: + curpath = '/' + '/'.join(fullpath[0:existing_len]) + else: + curpath = '' + new_segs = fullpath[fullpath_len - pre_len:fullpath_len - segleft] + for seg in new_segs: + curpath += '/' + seg + if curpath in app.config: + nodeconf.update(app.config[curpath]) + + object_trail.append([name, node, nodeconf, segleft]) + + def set_conf(): + """Collapse all object_trail config into cherrypy.request.config. + """ + base = cherrypy.config.copy() + # Note that we merge the config from each node + # even if that node was None. + for name, obj, conf, segleft in object_trail: + base.update(conf) + if 'tools.staticdir.dir' in conf: + base['tools.staticdir.section'] = '/' + \ + '/'.join(fullpath[0:fullpath_len - segleft]) + return base + + # Try successive objects (reverse order) + num_candidates = len(object_trail) - 1 + for i in range(num_candidates, -1, -1): + + name, candidate, nodeconf, segleft = object_trail[i] + if candidate is None: + continue + + # Try a "default" method on the current leaf. + if hasattr(candidate, "default"): + defhandler = candidate.default + if getattr(defhandler, 'exposed', False): + # Insert any extra _cp_config from the default handler. + conf = getattr(defhandler, "_cp_config", {}) + object_trail.insert( + i + 1, ["default", defhandler, conf, segleft]) + request.config = set_conf() + # See https://github.com/cherrypy/cherrypy/issues/613 + request.is_index = path.endswith("/") + return defhandler, fullpath[fullpath_len - segleft:-1] + + # Uncomment the next line to restrict positional params to + # "default". + # if i < num_candidates - 2: continue + + # Try the current leaf. + if getattr(candidate, 'exposed', False): + request.config = set_conf() + if i == num_candidates: + # We found the extra ".index". 
Mark request so tools + # can redirect if path_info has no trailing slash. + request.is_index = True + else: + # We're not at an 'index' handler. Mark request so tools + # can redirect if path_info has NO trailing slash. + # Note that this also includes handlers which take + # positional parameters (virtual paths). + request.is_index = False + return candidate, fullpath[fullpath_len - segleft:-1] + + # We didn't find anything + request.config = set_conf() + return None, [] + + +class MethodDispatcher(Dispatcher): + + """Additional dispatch based on cherrypy.request.method.upper(). + + Methods named GET, POST, etc will be called on an exposed class. + The method names must be all caps; the appropriate Allow header + will be output showing all capitalized method names as allowable + HTTP verbs. + + Note that the containing class must be exposed, not the methods. + """ + + def __call__(self, path_info): + """Set handler and config for the current request.""" + request = cherrypy.serving.request + resource, vpath = self.find_handler(path_info) + + if resource: + # Set Allow header + avail = [m for m in dir(resource) if m.isupper()] + if "GET" in avail and "HEAD" not in avail: + avail.append("HEAD") + avail.sort() + cherrypy.serving.response.headers['Allow'] = ", ".join(avail) + + # Find the subhandler + meth = request.method.upper() + func = getattr(resource, meth, None) + if func is None and meth == "HEAD": + func = getattr(resource, "GET", None) + if func: + # Grab any _cp_config on the subhandler. + if hasattr(func, "_cp_config"): + request.config.update(func._cp_config) + + # Decode any leftover %2F in the virtual_path atoms. 
+ vpath = [x.replace("%2F", "/") for x in vpath] + request.handler = LateParamPageHandler(func, *vpath) + else: + request.handler = cherrypy.HTTPError(405) + else: + request.handler = cherrypy.NotFound() + + +class RoutesDispatcher(object): + + """A Routes based dispatcher for CherryPy.""" + + def __init__(self, full_result=False, **mapper_options): + """ + Routes dispatcher + + Set full_result to True if you wish the controller + and the action to be passed on to the page handler + parameters. By default they won't be. + """ + import routes + self.full_result = full_result + self.controllers = {} + self.mapper = routes.Mapper(**mapper_options) + self.mapper.controller_scan = self.controllers.keys + + def connect(self, name, route, controller, **kwargs): + self.controllers[name] = controller + self.mapper.connect(name, route, controller=name, **kwargs) + + def redirect(self, url): + raise cherrypy.HTTPRedirect(url) + + def __call__(self, path_info): + """Set handler and config for the current request.""" + func = self.find_handler(path_info) + if func: + cherrypy.serving.request.handler = LateParamPageHandler(func) + else: + cherrypy.serving.request.handler = cherrypy.NotFound() + + def find_handler(self, path_info): + """Find the right page handler, and set request.config.""" + import routes + + request = cherrypy.serving.request + + config = routes.request_config() + config.mapper = self.mapper + if hasattr(request, 'wsgi_environ'): + config.environ = request.wsgi_environ + config.host = request.headers.get('Host', None) + config.protocol = request.scheme + config.redirect = self.redirect + + result = self.mapper.match(path_info) + + config.mapper_dict = result + params = {} + if result: + params = result.copy() + if not self.full_result: + params.pop('controller', None) + params.pop('action', None) + request.params.update(params) + + # Get config for the root object/path. 
+ request.config = base = cherrypy.config.copy() + curpath = "" + + def merge(nodeconf): + if 'tools.staticdir.dir' in nodeconf: + nodeconf['tools.staticdir.section'] = curpath or "/" + base.update(nodeconf) + + app = request.app + root = app.root + if hasattr(root, "_cp_config"): + merge(root._cp_config) + if "/" in app.config: + merge(app.config["/"]) + + # Mix in values from app.config. + atoms = [x for x in path_info.split("/") if x] + if atoms: + last = atoms.pop() + else: + last = None + for atom in atoms: + curpath = "/".join((curpath, atom)) + if curpath in app.config: + merge(app.config[curpath]) + + handler = None + if result: + controller = result.get('controller') + controller = self.controllers.get(controller, controller) + if controller: + if isinstance(controller, classtype): + controller = controller() + # Get config from the controller. + if hasattr(controller, "_cp_config"): + merge(controller._cp_config) + + action = result.get('action') + if action is not None: + handler = getattr(controller, action, None) + # Get config from the handler + if hasattr(handler, "_cp_config"): + merge(handler._cp_config) + else: + handler = controller + + # Do the last path atom here so it can + # override the controller's _cp_config. + if last: + curpath = "/".join((curpath, last)) + if curpath in app.config: + merge(app.config[curpath]) + + return handler + + +def XMLRPCDispatcher(next_dispatcher=Dispatcher()): + from cherrypy.lib import xmlrpcutil + + def xmlrpc_dispatch(path_info): + path_info = xmlrpcutil.patched_path(path_info) + return next_dispatcher(path_info) + return xmlrpc_dispatch + + +def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True, + **domains): + """ + Select a different handler based on the Host header. + + This can be useful when running multiple sites within one CP server. + It allows several domains to point to different parts of a single + website structure. 
For example:: + + http://www.domain.example -> root + http://www.domain2.example -> root/domain2/ + http://www.domain2.example:443 -> root/secure + + can be accomplished via the following config:: + + [/] + request.dispatch = cherrypy.dispatch.VirtualHost( + **{'www.domain2.example': '/domain2', + 'www.domain2.example:443': '/secure', + }) + + next_dispatcher + The next dispatcher object in the dispatch chain. + The VirtualHost dispatcher adds a prefix to the URL and calls + another dispatcher. Defaults to cherrypy.dispatch.Dispatcher(). + + use_x_forwarded_host + If True (the default), any "X-Forwarded-Host" + request header will be used instead of the "Host" header. This + is commonly added by HTTP servers (such as Apache) when proxying. + + ``**domains`` + A dict of {host header value: virtual prefix} pairs. + The incoming "Host" request header is looked up in this dict, + and, if a match is found, the corresponding "virtual prefix" + value will be prepended to the URL path before calling the + next dispatcher. Note that you often need separate entries + for "example.com" and "www.example.com". In addition, "Host" + headers may contain the port number. + """ + from cherrypy.lib import httputil + + def vhost_dispatch(path_info): + request = cherrypy.serving.request + header = request.headers.get + + domain = header('Host', '') + if use_x_forwarded_host: + domain = header("X-Forwarded-Host", domain) + + prefix = domains.get(domain, "") + if prefix: + path_info = httputil.urljoin(prefix, path_info) + + result = next_dispatcher(path_info) + + # Touch up staticdir config. See + # https://github.com/cherrypy/cherrypy/issues/614. 
+ section = request.config.get('tools.staticdir.section') + if section: + section = section[len(prefix):] + request.config['tools.staticdir.section'] = section + + return result + return vhost_dispatch diff --git a/deps/cherrypy/_cperror.py b/deps/cherrypy/_cperror.py new file mode 100644 index 00000000..ee7fad69 --- /dev/null +++ b/deps/cherrypy/_cperror.py @@ -0,0 +1,622 @@ +"""Exception classes for CherryPy. + +CherryPy provides (and uses) exceptions for declaring that the HTTP response +should be a status other than the default "200 OK". You can ``raise`` them like +normal Python exceptions. You can also call them and they will raise +themselves; this means you can set an +:class:`HTTPError` +or :class:`HTTPRedirect` as the +:attr:`request.handler`. + +.. _redirectingpost: + +Redirecting POST +================ + +When you GET a resource and are redirected by the server to another Location, +there's generally no problem since GET is both a "safe method" (there should +be no side-effects) and an "idempotent method" (multiple calls are no different +than a single call). + +POST, however, is neither safe nor idempotent--if you +charge a credit card, you don't want to be charged twice by a redirect! 
+ +For this reason, *none* of the 3xx responses permit a user-agent (browser) to +resubmit a POST on redirection without first confirming the action with the +user: + +===== ================================= =========== +300 Multiple Choices Confirm with the user +301 Moved Permanently Confirm with the user +302 Found (Object moved temporarily) Confirm with the user +303 See Other GET the new URI--no confirmation +304 Not modified (for conditional GET only--POST should not raise this error) +305 Use Proxy Confirm with the user +307 Temporary Redirect Confirm with the user +===== ================================= =========== + +However, browsers have historically implemented these restrictions poorly; +in particular, many browsers do not force the user to confirm 301, 302 +or 307 when redirecting POST. For this reason, CherryPy defaults to 303, +which most user-agents appear to have implemented correctly. Therefore, if +you raise HTTPRedirect for a POST request, the user-agent will most likely +attempt to GET the new URI (without asking for confirmation from the user). +We realize this is confusing for developers, but it's the safest thing we +could do. You are of course free to raise ``HTTPRedirect(uri, status=302)`` +or any other 3xx status if you know what you're doing, but given the +environment, we couldn't let any of those be the default. + +Custom Error Handling +===================== + +.. image:: /refman/cperrors.gif + +Anticipated HTTP responses +-------------------------- + +The 'error_page' config namespace can be used to provide custom HTML output for +expected responses (like 404 Not Found). Supply a filename from which the +output will be read. The contents will be interpolated with the values +%(status)s, %(message)s, %(traceback)s, and %(version)s using plain old Python +`string formatting `_. 
+ +:: + + _cp_config = { + 'error_page.404': os.path.join(localDir, "static/index.html") + } + + +Beginning in version 3.1, you may also provide a function or other callable as +an error_page entry. It will be passed the same status, message, traceback and +version arguments that are interpolated into templates:: + + def error_page_402(status, message, traceback, version): + return "Error %s - Well, I'm very sorry but you haven't paid!" % status + cherrypy.config.update({'error_page.402': error_page_402}) + +Also in 3.1, in addition to the numbered error codes, you may also supply +"error_page.default" to handle all codes which do not have their own error_page +entry. + + + +Unanticipated errors +-------------------- + +CherryPy also has a generic error handling mechanism: whenever an unanticipated +error occurs in your code, it will call +:func:`Request.error_response` to +set the response status, headers, and body. By default, this is the same +output as +:class:`HTTPError(500) `. If you want to provide +some other behavior, you generally replace "request.error_response". + +Here is some sample code that shows how to display a custom error message and +send an e-mail containing the error:: + + from cherrypy import _cperror + + def handle_error(): + cherrypy.response.status = 500 + cherrypy.response.body = [ + "Sorry, an error occured" + ] + sendMail('error@domain.com', + 'Error in your web app', + _cperror.format_exc()) + + @cherrypy.config(**{'request.error_response': handle_error}) + class Root: + pass + +Note that you have to explicitly set +:attr:`response.body ` +and not simply return an error message as a result. 
+""" + +import contextlib +from cgi import escape as _escape +from sys import exc_info as _exc_info +from traceback import format_exception as _format_exception +from xml.sax import saxutils + +import six + +from cherrypy._cpcompat import text_or_bytes, iteritems, ntob +from cherrypy._cpcompat import tonative, urljoin as _urljoin +from cherrypy.lib import httputil as _httputil + + +class CherryPyException(Exception): + + """A base class for CherryPy exceptions.""" + pass + + +class TimeoutError(CherryPyException): + + """Exception raised when Response.timed_out is detected.""" + pass + + +class InternalRedirect(CherryPyException): + + """Exception raised to switch to the handler for a different URL. + + This exception will redirect processing to another path within the site + (without informing the client). Provide the new path as an argument when + raising the exception. Provide any params in the querystring for the new + URL. + """ + + def __init__(self, path, query_string=""): + import cherrypy + self.request = cherrypy.serving.request + + self.query_string = query_string + if "?" in path: + # Separate any params included in the path + path, self.query_string = path.split("?", 1) + + # Note that urljoin will "do the right thing" whether url is: + # 1. a URL relative to root (e.g. "/dummy") + # 2. a URL relative to the current path + # Note that any query string will be discarded. + path = _urljoin(self.request.path_info, path) + + # Set a 'path' member attribute so that code which traps this + # error can have access to it. + self.path = path + + CherryPyException.__init__(self, path, self.query_string) + + +class HTTPRedirect(CherryPyException): + + """Exception raised when the request should be redirected. + + This exception will force a HTTP redirect to the URL or URL's you give it. + The new URL must be passed as the first argument to the Exception, + e.g., HTTPRedirect(newUrl). Multiple URLs are allowed in a list. 
+ If a URL is absolute, it will be used as-is. If it is relative, it is + assumed to be relative to the current cherrypy.request.path_info. + + If one of the provided URL is a unicode object, it will be encoded + using the default encoding or the one passed in parameter. + + There are multiple types of redirect, from which you can select via the + ``status`` argument. If you do not provide a ``status`` arg, it defaults to + 303 (or 302 if responding with HTTP/1.0). + + Examples:: + + raise cherrypy.HTTPRedirect("") + raise cherrypy.HTTPRedirect("/abs/path", 307) + raise cherrypy.HTTPRedirect(["path1", "path2?a=1&b=2"], 301) + + See :ref:`redirectingpost` for additional caveats. + """ + + status = None + """The integer HTTP status code to emit.""" + + urls = None + """The list of URL's to emit.""" + + encoding = 'utf-8' + """The encoding when passed urls are not native strings""" + + def __init__(self, urls, status=None, encoding=None): + import cherrypy + request = cherrypy.serving.request + + if isinstance(urls, text_or_bytes): + urls = [urls] + + abs_urls = [] + for url in urls: + url = tonative(url, encoding or self.encoding) + + # Note that urljoin will "do the right thing" whether url is: + # 1. a complete URL with host (e.g. "http://www.example.com/test") + # 2. a URL relative to root (e.g. "/dummy") + # 3. a URL relative to the current path + # Note that any query string in cherrypy.request is discarded. + url = _urljoin(cherrypy.url(), url) + abs_urls.append(url) + self.urls = abs_urls + + # RFC 2616 indicates a 301 response code fits our goal; however, + # browser support for 301 is quite messy. Do 302/303 instead. 
See + # http://www.alanflavell.org.uk/www/post-redirect.html + if status is None: + if request.protocol >= (1, 1): + status = 303 + else: + status = 302 + else: + status = int(status) + if status < 300 or status > 399: + raise ValueError("status must be between 300 and 399.") + + self.status = status + CherryPyException.__init__(self, abs_urls, status) + + def set_response(self): + """Modify cherrypy.response status, headers, and body to represent + self. + + CherryPy uses this internally, but you can also use it to create an + HTTPRedirect object and set its output without *raising* the exception. + """ + import cherrypy + response = cherrypy.serving.response + response.status = status = self.status + + if status in (300, 301, 302, 303, 307): + response.headers['Content-Type'] = "text/html;charset=utf-8" + # "The ... URI SHOULD be given by the Location field + # in the response." + response.headers['Location'] = self.urls[0] + + # "Unless the request method was HEAD, the entity of the response + # SHOULD contain a short hypertext note with a hyperlink to the + # new URI(s)." + msg = { + 300: "This resource can be found at ", + 301: "This resource has permanently moved to ", + 302: "This resource resides temporarily at ", + 303: "This resource can be found at ", + 307: "This resource has moved temporarily to ", + }[status] + msg += '%s.' + msgs = [msg % (saxutils.quoteattr(u), u) for u in self.urls] + response.body = ntob("
\n".join(msgs), 'utf-8') + # Previous code may have set C-L, so we have to reset it + # (allow finalize to set it). + response.headers.pop('Content-Length', None) + elif status == 304: + # Not Modified. + # "The response MUST include the following header fields: + # Date, unless its omission is required by section 14.18.1" + # The "Date" header should have been set in Response.__init__ + + # "...the response SHOULD NOT include other entity-headers." + for key in ('Allow', 'Content-Encoding', 'Content-Language', + 'Content-Length', 'Content-Location', 'Content-MD5', + 'Content-Range', 'Content-Type', 'Expires', + 'Last-Modified'): + if key in response.headers: + del response.headers[key] + + # "The 304 response MUST NOT contain a message-body." + response.body = None + # Previous code may have set C-L, so we have to reset it. + response.headers.pop('Content-Length', None) + elif status == 305: + # Use Proxy. + # self.urls[0] should be the URI of the proxy. + response.headers['Location'] = ntob(self.urls[0], 'utf-8') + response.body = None + # Previous code may have set C-L, so we have to reset it. + response.headers.pop('Content-Length', None) + else: + raise ValueError("The %s status code is unknown." % status) + + def __call__(self): + """Use this exception as a request.handler (raise self).""" + raise self + + +def clean_headers(status): + """Remove any headers which should not apply to an error response.""" + import cherrypy + + response = cherrypy.serving.response + + # Remove headers which applied to the original content, + # but do not apply to the error page. 
+ respheaders = response.headers + for key in ["Accept-Ranges", "Age", "ETag", "Location", "Retry-After", + "Vary", "Content-Encoding", "Content-Length", "Expires", + "Content-Location", "Content-MD5", "Last-Modified"]: + if key in respheaders: + del respheaders[key] + + if status != 416: + # A server sending a response with status code 416 (Requested + # range not satisfiable) SHOULD include a Content-Range field + # with a byte-range-resp-spec of "*". The instance-length + # specifies the current length of the selected resource. + # A response with status code 206 (Partial Content) MUST NOT + # include a Content-Range field with a byte-range- resp-spec of "*". + if "Content-Range" in respheaders: + del respheaders["Content-Range"] + + +class HTTPError(CherryPyException): + + """Exception used to return an HTTP error code (4xx-5xx) to the client. + + This exception can be used to automatically send a response using a + http status code, with an appropriate error page. It takes an optional + ``status`` argument (which must be between 400 and 599); it defaults to 500 + ("Internal Server Error"). It also takes an optional ``message`` argument, + which will be returned in the response body. See + `RFC2616 `_ + for a complete list of available error codes and when to use them. + + Examples:: + + raise cherrypy.HTTPError(403) + raise cherrypy.HTTPError( + "403 Forbidden", "You are not allowed to access this resource.") + """ + + status = None + """The HTTP status code. May be of type int or str (with a Reason-Phrase). 
+ """ + + code = None + """The integer HTTP status code.""" + + reason = None + """The HTTP Reason-Phrase string.""" + + def __init__(self, status=500, message=None): + self.status = status + try: + self.code, self.reason, defaultmsg = _httputil.valid_status(status) + except ValueError: + raise self.__class__(500, _exc_info()[1].args[0]) + + if self.code < 400 or self.code > 599: + raise ValueError("status must be between 400 and 599.") + + # See http://www.python.org/dev/peps/pep-0352/ + # self.message = message + self._message = message or defaultmsg + CherryPyException.__init__(self, status, message) + + def set_response(self): + """Modify cherrypy.response status, headers, and body to represent + self. + + CherryPy uses this internally, but you can also use it to create an + HTTPError object and set its output without *raising* the exception. + """ + import cherrypy + + response = cherrypy.serving.response + + clean_headers(self.code) + + # In all cases, finalize will be called after this method, + # so don't bother cleaning up response values here. + response.status = self.status + tb = None + if cherrypy.serving.request.show_tracebacks: + tb = format_exc() + + response.headers.pop('Content-Length', None) + + content = self.get_error_page(self.status, traceback=tb, + message=self._message) + response.body = content + + _be_ie_unfriendly(self.code) + + def get_error_page(self, *args, **kwargs): + return get_error_page(*args, **kwargs) + + def __call__(self): + """Use this exception as a request.handler (raise self).""" + raise self + + @classmethod + @contextlib.contextmanager + def handle(cls, exception, status=500, message=''): + """Translate exception into an HTTPError.""" + try: + yield + except exception as exc: + raise cls(status, message or str(exc)) + + +class NotFound(HTTPError): + + """Exception raised when a URL could not be mapped to any handler (404). + + This is equivalent to raising + :class:`HTTPError("404 Not Found") `. 
+ """ + + def __init__(self, path=None): + if path is None: + import cherrypy + request = cherrypy.serving.request + path = request.script_name + request.path_info + self.args = (path,) + HTTPError.__init__(self, 404, "The path '%s' was not found." % path) + + +_HTTPErrorTemplate = ''' + + + + %(status)s + + + +

%(status)s

+

%(message)s

+
%(traceback)s
+
+ + Powered by CherryPy %(version)s + +
+ + +''' + + +def get_error_page(status, **kwargs): + """Return an HTML page, containing a pretty error response. + + status should be an int or a str. + kwargs will be interpolated into the page template. + """ + import cherrypy + + try: + code, reason, message = _httputil.valid_status(status) + except ValueError: + raise cherrypy.HTTPError(500, _exc_info()[1].args[0]) + + # We can't use setdefault here, because some + # callers send None for kwarg values. + if kwargs.get('status') is None: + kwargs['status'] = "%s %s" % (code, reason) + if kwargs.get('message') is None: + kwargs['message'] = message + if kwargs.get('traceback') is None: + kwargs['traceback'] = '' + if kwargs.get('version') is None: + kwargs['version'] = cherrypy.__version__ + + for k, v in iteritems(kwargs): + if v is None: + kwargs[k] = "" + else: + kwargs[k] = _escape(kwargs[k]) + + # Use a custom template or callable for the error page? + pages = cherrypy.serving.request.error_page + error_page = pages.get(code) or pages.get('default') + + # Default template, can be overridden below. + template = _HTTPErrorTemplate + if error_page: + try: + if hasattr(error_page, '__call__'): + # The caller function may be setting headers manually, + # so we delegate to it completely. We may be returning + # an iterator as well as a string here. + # + # We *must* make sure any content is not unicode. + result = error_page(**kwargs) + if cherrypy.lib.is_iterator(result): + from cherrypy.lib.encoding import UTF8StreamEncoder + return UTF8StreamEncoder(result) + elif isinstance(result, six.text_type): + return result.encode('utf-8') + else: + if not isinstance(result, bytes): + raise ValueError('error page function did not ' + 'return a bytestring, six.text_typeing or an ' + 'iterator - returned object of type %s.' + % (type(result).__name__)) + return result + else: + # Load the template from this path. 
+ template = tonative(open(error_page, 'rb').read()) + except: + e = _format_exception(*_exc_info())[-1] + m = kwargs['message'] + if m: + m += "
" + m += "In addition, the custom error page failed:\n
%s" % e + kwargs['message'] = m + + response = cherrypy.serving.response + response.headers['Content-Type'] = "text/html;charset=utf-8" + result = template % kwargs + return result.encode('utf-8') + + + +_ie_friendly_error_sizes = { + 400: 512, 403: 256, 404: 512, 405: 256, + 406: 512, 408: 512, 409: 512, 410: 256, + 500: 512, 501: 512, 505: 512, +} + + +def _be_ie_unfriendly(status): + import cherrypy + response = cherrypy.serving.response + + # For some statuses, Internet Explorer 5+ shows "friendly error + # messages" instead of our response.body if the body is smaller + # than a given size. Fix this by returning a body over that size + # (by adding whitespace). + # See http://support.microsoft.com/kb/q218155/ + s = _ie_friendly_error_sizes.get(status, 0) + if s: + s += 1 + # Since we are issuing an HTTP error status, we assume that + # the entity is short, and we should just collapse it. + content = response.collapse_body() + l = len(content) + if l and l < s: + # IN ADDITION: the response must be written to IE + # in one chunk or it will still get replaced! Bah. + content = content + (ntob(" ") * (s - l)) + response.body = content + response.headers['Content-Length'] = str(len(content)) + + +def format_exc(exc=None): + """Return exc (or sys.exc_info if None), formatted.""" + try: + if exc is None: + exc = _exc_info() + if exc == (None, None, None): + return "" + import traceback + return "".join(traceback.format_exception(*exc)) + finally: + del exc + + +def bare_error(extrabody=None): + """Produce status, headers, body for a critical error. + + Returns a triple without calling any other questionable functions, + so it should be as error-free as possible. Call it from an HTTP server + if you get errors outside of the request. + + If extrabody is None, a friendly but rather unhelpful error message + is set in the body. If extrabody is a string, it will be appended + as-is to the body. 
+ """ + + # The whole point of this function is to be a last line-of-defense + # in handling errors. That is, it must not raise any errors itself; + # it cannot be allowed to fail. Therefore, don't add to it! + # In particular, don't call any other CP functions. + + body = ntob("Unrecoverable error in the server.") + if extrabody is not None: + if not isinstance(extrabody, bytes): + extrabody = extrabody.encode('utf-8') + body += ntob("\n") + extrabody + + return (ntob("500 Internal Server Error"), + [(ntob('Content-Type'), ntob('text/plain')), + (ntob('Content-Length'), ntob(str(len(body)), 'ISO-8859-1'))], + [body]) diff --git a/deps/cherrypy/_cplogging.py b/deps/cherrypy/_cplogging.py new file mode 100644 index 00000000..b13b49e6 --- /dev/null +++ b/deps/cherrypy/_cplogging.py @@ -0,0 +1,462 @@ +""" +Simple config +============= + +Although CherryPy uses the :mod:`Python logging module `, it does so +behind the scenes so that simple logging is simple, but complicated logging +is still possible. "Simple" logging means that you can log to the screen +(i.e. console/stdout) or to a file, and that you can easily have separate +error and access log files. + +Here are the simplified logging settings. You use these by adding lines to +your config file or dict. You should set these at either the global level or +per application (see next), but generally not both. + + * ``log.screen``: Set this to True to have both "error" and "access" messages + printed to stdout. + * ``log.access_file``: Set this to an absolute filename where you want + "access" messages written. + * ``log.error_file``: Set this to an absolute filename where you want "error" + messages written. + +Many events are automatically logged; to log your own application events, call +:func:`cherrypy.log`. + +Architecture +============ + +Separate scopes +--------------- + +CherryPy provides log managers at both the global and application layers. 
+This means you can have one set of logging rules for your entire site, +and another set of rules specific to each application. The global log +manager is found at :func:`cherrypy.log`, and the log manager for each +application is found at :attr:`app.log`. +If you're inside a request, the latter is reachable from +``cherrypy.request.app.log``; if you're outside a request, you'll have to +obtain a reference to the ``app``: either the return value of +:func:`tree.mount()` or, if you used +:func:`quickstart()` instead, via +``cherrypy.tree.apps['/']``. + +By default, the global logs are named "cherrypy.error" and "cherrypy.access", +and the application logs are named "cherrypy.error.2378745" and +"cherrypy.access.2378745" (the number is the id of the Application object). +This means that the application logs "bubble up" to the site logs, so if your +application has no log handlers, the site-level handlers will still log the +messages. + +Errors vs. Access +----------------- + +Each log manager handles both "access" messages (one per HTTP request) and +"error" messages (everything else). Note that the "error" log is not just for +errors! The format of access messages is highly formalized, but the error log +isn't--it receives messages from a variety of sources (including full error +tracebacks, if enabled). + +If you are logging the access log and error log to the same source, then there +is a possibility that a specially crafted error message may replicate an access +log message as described in CWE-117. In this case it is the application +developer's responsibility to manually escape data before using CherryPy's log() +functionality, or they may create an application that is vulnerable to CWE-117. +This would be achieved by using a custom handler escape any special characters, +and attached as described below. + +Custom Handlers +=============== + +The simple settings above work by manipulating Python's standard :mod:`logging` +module. 
So when you need something more complex, the full power of the standard +module is yours to exploit. You can borrow or create custom handlers, formats, +filters, and much more. Here's an example that skips the standard FileHandler +and uses a RotatingFileHandler instead: + +:: + + #python + log = app.log + + # Remove the default FileHandlers if present. + log.error_file = "" + log.access_file = "" + + maxBytes = getattr(log, "rot_maxBytes", 10000000) + backupCount = getattr(log, "rot_backupCount", 1000) + + # Make a new RotatingFileHandler for the error log. + fname = getattr(log, "rot_error_file", "error.log") + h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount) + h.setLevel(DEBUG) + h.setFormatter(_cplogging.logfmt) + log.error_log.addHandler(h) + + # Make a new RotatingFileHandler for the access log. + fname = getattr(log, "rot_access_file", "access.log") + h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount) + h.setLevel(DEBUG) + h.setFormatter(_cplogging.logfmt) + log.access_log.addHandler(h) + + +The ``rot_*`` attributes are pulled straight from the application log object. +Since "log.*" config entries simply set attributes on the log object, you can +add custom attributes to your heart's content. Note that these handlers are +used ''instead'' of the default, simple handlers outlined above (so don't set +the "log.error_file" config entry, for example). +""" + +import datetime +import logging +# Silence the no-handlers "warning" (stderr write!) 
in stdlib logging +logging.Logger.manager.emittedNoHandlerWarning = 1 +logfmt = logging.Formatter("%(message)s") +import os +import sys + +import six + +import cherrypy +from cherrypy import _cperror +from cherrypy._cpcompat import ntob + + +class NullHandler(logging.Handler): + + """A no-op logging handler to silence the logging.lastResort handler.""" + + def handle(self, record): + pass + + def emit(self, record): + pass + + def createLock(self): + self.lock = None + + +class LogManager(object): + + """An object to assist both simple and advanced logging. + + ``cherrypy.log`` is an instance of this class. + """ + + appid = None + """The id() of the Application object which owns this log manager. If this + is a global log manager, appid is None.""" + + error_log = None + """The actual :class:`logging.Logger` instance for error messages.""" + + access_log = None + """The actual :class:`logging.Logger` instance for access messages.""" + + access_log_format = ( + '{h} {l} {u} {t} "{r}" {s} {b} "{f}" "{a}"' + if six.PY3 else + '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"' + ) + + logger_root = None + """The "top-level" logger name. + + This string will be used as the first segment in the Logger names. + The default is "cherrypy", for example, in which case the Logger names + will be of the form:: + + cherrypy.error. + cherrypy.access. + """ + + def __init__(self, appid=None, logger_root="cherrypy"): + self.logger_root = logger_root + self.appid = appid + if appid is None: + self.error_log = logging.getLogger("%s.error" % logger_root) + self.access_log = logging.getLogger("%s.access" % logger_root) + else: + self.error_log = logging.getLogger( + "%s.error.%s" % (logger_root, appid)) + self.access_log = logging.getLogger( + "%s.access.%s" % (logger_root, appid)) + self.error_log.setLevel(logging.INFO) + self.access_log.setLevel(logging.INFO) + + # Silence the no-handlers "warning" (stderr write!) 
in stdlib logging + self.error_log.addHandler(NullHandler()) + self.access_log.addHandler(NullHandler()) + + cherrypy.engine.subscribe('graceful', self.reopen_files) + + def reopen_files(self): + """Close and reopen all file handlers.""" + for log in (self.error_log, self.access_log): + for h in log.handlers: + if isinstance(h, logging.FileHandler): + h.acquire() + h.stream.close() + h.stream = open(h.baseFilename, h.mode) + h.release() + + def error(self, msg='', context='', severity=logging.INFO, + traceback=False): + """Write the given ``msg`` to the error log. + + This is not just for errors! Applications may call this at any time + to log application-specific information. + + If ``traceback`` is True, the traceback of the current exception + (if any) will be appended to ``msg``. + """ + exc_info = None + if traceback: + exc_info = _cperror._exc_info() + + self.error_log.log(severity, ' '.join((self.time(), context, msg)), exc_info=exc_info) + + def __call__(self, *args, **kwargs): + """An alias for ``error``.""" + return self.error(*args, **kwargs) + + def access(self): + """Write to the access log (in Apache/NCSA Combined Log format). + + See the + `apache documentation `_ + for format details. + + CherryPy calls this automatically for you. Note there are no arguments; + it collects the data itself from + :class:`cherrypy.request`. + + Like Apache started doing in 2.0.46, non-printable and other special + characters in %r (and we expand that to all parts) are escaped using + \\xhh sequences, where hh stands for the hexadecimal representation + of the raw byte. Exceptions from this rule are " and \\, which are + escaped by prepending a backslash, and all whitespace characters, + which are written in their C-style notation (\\n, \\t, etc). 
+ """ + request = cherrypy.serving.request + remote = request.remote + response = cherrypy.serving.response + outheaders = response.headers + inheaders = request.headers + if response.output_status is None: + status = "-" + else: + status = response.output_status.split(ntob(" "), 1)[0] + if six.PY3: + status = status.decode('ISO-8859-1') + + atoms = {'h': remote.name or remote.ip, + 'l': '-', + 'u': getattr(request, "login", None) or "-", + 't': self.time(), + 'r': request.request_line, + 's': status, + 'b': dict.get(outheaders, 'Content-Length', '') or "-", + 'f': dict.get(inheaders, 'Referer', ''), + 'a': dict.get(inheaders, 'User-Agent', ''), + 'o': dict.get(inheaders, 'Host', '-'), + } + if six.PY3: + for k, v in atoms.items(): + if not isinstance(v, str): + v = str(v) + v = v.replace('"', '\\"').encode('utf8') + # Fortunately, repr(str) escapes unprintable chars, \n, \t, etc + # and backslash for us. All we have to do is strip the quotes. + v = repr(v)[2:-1] + + # in python 3.0 the repr of bytes (as returned by encode) + # uses double \'s. But then the logger escapes them yet, again + # resulting in quadruple slashes. Remove the extra one here. + v = v.replace('\\\\', '\\') + + # Escape double-quote. + atoms[k] = v + + try: + self.access_log.log( + logging.INFO, self.access_log_format.format(**atoms)) + except: + self(traceback=True) + else: + for k, v in atoms.items(): + if isinstance(v, six.text_type): + v = v.encode('utf8') + elif not isinstance(v, str): + v = str(v) + # Fortunately, repr(str) escapes unprintable chars, \n, \t, etc + # and backslash for us. All we have to do is strip the quotes. + v = repr(v)[1:-1] + # Escape double-quote. 
+ atoms[k] = v.replace('"', '\\"') + + try: + self.access_log.log( + logging.INFO, self.access_log_format % atoms) + except: + self(traceback=True) + + def time(self): + """Return now() in Apache Common Log Format (no timezone).""" + now = datetime.datetime.now() + monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', + 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'] + month = monthnames[now.month - 1].capitalize() + return ('[%02d/%s/%04d:%02d:%02d:%02d]' % + (now.day, month, now.year, now.hour, now.minute, now.second)) + + def _get_builtin_handler(self, log, key): + for h in log.handlers: + if getattr(h, "_cpbuiltin", None) == key: + return h + + # ------------------------- Screen handlers ------------------------- # + def _set_screen_handler(self, log, enable, stream=None): + h = self._get_builtin_handler(log, "screen") + if enable: + if not h: + if stream is None: + stream = sys.stderr + h = logging.StreamHandler(stream) + h.setFormatter(logfmt) + h._cpbuiltin = "screen" + log.addHandler(h) + elif h: + log.handlers.remove(h) + + def _get_screen(self): + h = self._get_builtin_handler + has_h = h(self.error_log, "screen") or h(self.access_log, "screen") + return bool(has_h) + + def _set_screen(self, newvalue): + self._set_screen_handler(self.error_log, newvalue, stream=sys.stderr) + self._set_screen_handler(self.access_log, newvalue, stream=sys.stdout) + screen = property(_get_screen, _set_screen, + doc="""Turn stderr/stdout logging on or off. + + If you set this to True, it'll add the appropriate StreamHandler for + you. If you set it to False, it will remove the handler. 
+ """) + + # -------------------------- File handlers -------------------------- # + + def _add_builtin_file_handler(self, log, fname): + h = logging.FileHandler(fname) + h.setFormatter(logfmt) + h._cpbuiltin = "file" + log.addHandler(h) + + def _set_file_handler(self, log, filename): + h = self._get_builtin_handler(log, "file") + if filename: + if h: + if h.baseFilename != os.path.abspath(filename): + h.close() + log.handlers.remove(h) + self._add_builtin_file_handler(log, filename) + else: + self._add_builtin_file_handler(log, filename) + else: + if h: + h.close() + log.handlers.remove(h) + + def _get_error_file(self): + h = self._get_builtin_handler(self.error_log, "file") + if h: + return h.baseFilename + return '' + + def _set_error_file(self, newvalue): + self._set_file_handler(self.error_log, newvalue) + error_file = property(_get_error_file, _set_error_file, + doc="""The filename for self.error_log. + + If you set this to a string, it'll add the appropriate FileHandler for + you. If you set it to ``None`` or ``''``, it will remove the handler. + """) + + def _get_access_file(self): + h = self._get_builtin_handler(self.access_log, "file") + if h: + return h.baseFilename + return '' + + def _set_access_file(self, newvalue): + self._set_file_handler(self.access_log, newvalue) + access_file = property(_get_access_file, _set_access_file, + doc="""The filename for self.access_log. + + If you set this to a string, it'll add the appropriate FileHandler for + you. If you set it to ``None`` or ``''``, it will remove the handler. 
+ """) + + # ------------------------- WSGI handlers ------------------------- # + + def _set_wsgi_handler(self, log, enable): + h = self._get_builtin_handler(log, "wsgi") + if enable: + if not h: + h = WSGIErrorHandler() + h.setFormatter(logfmt) + h._cpbuiltin = "wsgi" + log.addHandler(h) + elif h: + log.handlers.remove(h) + + def _get_wsgi(self): + return bool(self._get_builtin_handler(self.error_log, "wsgi")) + + def _set_wsgi(self, newvalue): + self._set_wsgi_handler(self.error_log, newvalue) + wsgi = property(_get_wsgi, _set_wsgi, + doc="""Write errors to wsgi.errors. + + If you set this to True, it'll add the appropriate + :class:`WSGIErrorHandler` for you + (which writes errors to ``wsgi.errors``). + If you set it to False, it will remove the handler. + """) + + +class WSGIErrorHandler(logging.Handler): + + "A handler class which writes logging records to environ['wsgi.errors']." + + def flush(self): + """Flushes the stream.""" + try: + stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors') + except (AttributeError, KeyError): + pass + else: + stream.flush() + + def emit(self, record): + """Emit a record.""" + try: + stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors') + except (AttributeError, KeyError): + pass + else: + try: + msg = self.format(record) + fs = "%s\n" + import types + # if no unicode support... 
+ if not hasattr(types, "UnicodeType"): + stream.write(fs % msg) + else: + try: + stream.write(fs % msg) + except UnicodeError: + stream.write(fs % msg.encode("UTF-8")) + self.flush() + except: + self.handleError(record) diff --git a/deps/cherrypy/_cpmodpy.py b/deps/cherrypy/_cpmodpy.py new file mode 100644 index 00000000..1f80093f --- /dev/null +++ b/deps/cherrypy/_cpmodpy.py @@ -0,0 +1,354 @@ +"""Native adapter for serving CherryPy via mod_python + +Basic usage: + +########################################## +# Application in a module called myapp.py +########################################## + +import cherrypy + +class Root: + @cherrypy.expose + def index(self): + return 'Hi there, Ho there, Hey there' + + +# We will use this method from the mod_python configuration +# as the entry point to our application +def setup_server(): + cherrypy.tree.mount(Root()) + cherrypy.config.update({'environment': 'production', + 'log.screen': False, + 'show_tracebacks': False}) + +########################################## +# mod_python settings for apache2 +# This should reside in your httpd.conf +# or a file that will be loaded at +# apache startup +########################################## + +# Start +DocumentRoot "/" +Listen 8080 +LoadModule python_module /usr/lib/apache2/modules/mod_python.so + + + PythonPath "sys.path+['/path/to/my/application']" + SetHandler python-program + PythonHandler cherrypy._cpmodpy::handler + PythonOption cherrypy.setup myapp::setup_server + PythonDebug On + +# End + +The actual path to your mod_python.so is dependent on your +environment. In this case we suppose a global mod_python +installation on a Linux distribution such as Ubuntu. + +We do set the PythonPath configuration setting so that +your application can be found by from the user running +the apache2 instance. Of course if your application +resides in the global site-package this won't be needed. 
+ +Then restart apache2 and access http://127.0.0.1:8080 +""" + +import logging +import sys +import io + +import cherrypy +from cherrypy._cpcompat import copyitems, ntob +from cherrypy._cperror import format_exc, bare_error +from cherrypy.lib import httputil + + +# ------------------------------ Request-handling + + +def setup(req): + from mod_python import apache + + # Run any setup functions defined by a "PythonOption cherrypy.setup" + # directive. + options = req.get_options() + if 'cherrypy.setup' in options: + for function in options['cherrypy.setup'].split(): + atoms = function.split('::', 1) + if len(atoms) == 1: + mod = __import__(atoms[0], globals(), locals()) + else: + modname, fname = atoms + mod = __import__(modname, globals(), locals(), [fname]) + func = getattr(mod, fname) + func() + + cherrypy.config.update({'log.screen': False, + "tools.ignore_headers.on": True, + "tools.ignore_headers.headers": ['Range'], + }) + + engine = cherrypy.engine + if hasattr(engine, "signal_handler"): + engine.signal_handler.unsubscribe() + if hasattr(engine, "console_control_handler"): + engine.console_control_handler.unsubscribe() + engine.autoreload.unsubscribe() + cherrypy.server.unsubscribe() + + def _log(msg, level): + newlevel = apache.APLOG_ERR + if logging.DEBUG >= level: + newlevel = apache.APLOG_DEBUG + elif logging.INFO >= level: + newlevel = apache.APLOG_INFO + elif logging.WARNING >= level: + newlevel = apache.APLOG_WARNING + # On Windows, req.server is required or the msg will vanish. See + # http://www.modpython.org/pipermail/mod_python/2003-October/014291.html + # Also, "When server is not specified...LogLevel does not apply..." + apache.log_error(msg, newlevel, req.server) + engine.subscribe('log', _log) + + engine.start() + + def cherrypy_cleanup(data): + engine.exit() + try: + # apache.register_cleanup wasn't available until 3.1.4. 
+ apache.register_cleanup(cherrypy_cleanup) + except AttributeError: + req.server.register_cleanup(req, cherrypy_cleanup) + + +class _ReadOnlyRequest: + expose = ('read', 'readline', 'readlines') + + def __init__(self, req): + for method in self.expose: + self.__dict__[method] = getattr(req, method) + + +recursive = False + +_isSetUp = False + + +def handler(req): + from mod_python import apache + try: + global _isSetUp + if not _isSetUp: + setup(req) + _isSetUp = True + + # Obtain a Request object from CherryPy + local = req.connection.local_addr + local = httputil.Host( + local[0], local[1], req.connection.local_host or "") + remote = req.connection.remote_addr + remote = httputil.Host( + remote[0], remote[1], req.connection.remote_host or "") + + scheme = req.parsed_uri[0] or 'http' + req.get_basic_auth_pw() + + try: + # apache.mpm_query only became available in mod_python 3.1 + q = apache.mpm_query + threaded = q(apache.AP_MPMQ_IS_THREADED) + forked = q(apache.AP_MPMQ_IS_FORKED) + except AttributeError: + bad_value = ("You must provide a PythonOption '%s', " + "either 'on' or 'off', when running a version " + "of mod_python < 3.1") + + threaded = options.get('multithread', '').lower() + if threaded == 'on': + threaded = True + elif threaded == 'off': + threaded = False + else: + raise ValueError(bad_value % "multithread") + + forked = options.get('multiprocess', '').lower() + if forked == 'on': + forked = True + elif forked == 'off': + forked = False + else: + raise ValueError(bad_value % "multiprocess") + + sn = cherrypy.tree.script_name(req.uri or "/") + if sn is None: + send_response(req, '404 Not Found', [], '') + else: + app = cherrypy.tree.apps[sn] + method = req.method + path = req.uri + qs = req.args or "" + reqproto = req.protocol + headers = copyitems(req.headers_in) + rfile = _ReadOnlyRequest(req) + prev = None + + try: + redirections = [] + while True: + request, response = app.get_serving(local, remote, scheme, + "HTTP/1.1") + request.login = 
req.user + request.multithread = bool(threaded) + request.multiprocess = bool(forked) + request.app = app + request.prev = prev + + # Run the CherryPy Request object and obtain the response + try: + request.run(method, path, qs, reqproto, headers, rfile) + break + except cherrypy.InternalRedirect: + ir = sys.exc_info()[1] + app.release_serving() + prev = request + + if not recursive: + if ir.path in redirections: + raise RuntimeError( + "InternalRedirector visited the same URL " + "twice: %r" % ir.path) + else: + # Add the *previous* path_info + qs to + # redirections. + if qs: + qs = "?" + qs + redirections.append(sn + path + qs) + + # Munge environment and try again. + method = "GET" + path = ir.path + qs = ir.query_string + rfile = io.BytesIO() + + send_response( + req, response.output_status, response.header_list, + response.body, response.stream) + finally: + app.release_serving() + except: + tb = format_exc() + cherrypy.log(tb, 'MOD_PYTHON', severity=logging.ERROR) + s, h, b = bare_error() + send_response(req, s, h, b) + return apache.OK + + +def send_response(req, status, headers, body, stream=False): + # Set response status + req.status = int(status[:3]) + + # Set response headers + req.content_type = "text/plain" + for header, value in headers: + if header.lower() == 'content-type': + req.content_type = value + continue + req.headers_out.add(header, value) + + if stream: + # Flush now so the status and headers are sent immediately. 
+ req.flush() + + # Set response body + if isinstance(body, text_or_bytes): + req.write(body) + else: + for seg in body: + req.write(seg) + + +# --------------- Startup tools for CherryPy + mod_python --------------- # +import os +import re +try: + import subprocess + + def popen(fullcmd): + p = subprocess.Popen(fullcmd, shell=True, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + close_fds=True) + return p.stdout +except ImportError: + def popen(fullcmd): + pipein, pipeout = os.popen4(fullcmd) + return pipeout + + +def read_process(cmd, args=""): + fullcmd = "%s %s" % (cmd, args) + pipeout = popen(fullcmd) + try: + firstline = pipeout.readline() + cmd_not_found = re.search( + ntob("(not recognized|No such file|not found)"), + firstline, + re.IGNORECASE + ) + if cmd_not_found: + raise IOError('%s must be on your system path.' % cmd) + output = firstline + pipeout.read() + finally: + pipeout.close() + return output + + +class ModPythonServer(object): + + template = """ +# Apache2 server configuration file for running CherryPy with mod_python. 
+ +DocumentRoot "/" +Listen %(port)s +LoadModule python_module modules/mod_python.so + + + SetHandler python-program + PythonHandler %(handler)s + PythonDebug On +%(opts)s + +""" + + def __init__(self, loc="/", port=80, opts=None, apache_path="apache", + handler="cherrypy._cpmodpy::handler"): + self.loc = loc + self.port = port + self.opts = opts + self.apache_path = apache_path + self.handler = handler + + def start(self): + opts = "".join([" PythonOption %s %s\n" % (k, v) + for k, v in self.opts]) + conf_data = self.template % {"port": self.port, + "loc": self.loc, + "opts": opts, + "handler": self.handler, + } + + mpconf = os.path.join(os.path.dirname(__file__), "cpmodpy.conf") + f = open(mpconf, 'wb') + try: + f.write(conf_data) + finally: + f.close() + + response = read_process(self.apache_path, "-k start -f %s" % mpconf) + self.ready = True + return response + + def stop(self): + os.popen("apache -k stop") + self.ready = False diff --git a/deps/cherrypy/_cpnative_server.py b/deps/cherrypy/_cpnative_server.py new file mode 100644 index 00000000..8edb89c1 --- /dev/null +++ b/deps/cherrypy/_cpnative_server.py @@ -0,0 +1,154 @@ +"""Native adapter for serving CherryPy via its builtin server.""" + +import logging +import sys +import io + +import cherrypy +from cherrypy._cperror import format_exc, bare_error +from cherrypy.lib import httputil +from cherrypy import wsgiserver + + +class NativeGateway(wsgiserver.Gateway): + + recursive = False + + def respond(self): + req = self.req + try: + # Obtain a Request object from CherryPy + local = req.server.bind_addr + local = httputil.Host(local[0], local[1], "") + remote = req.conn.remote_addr, req.conn.remote_port + remote = httputil.Host(remote[0], remote[1], "") + + scheme = req.scheme + sn = cherrypy.tree.script_name(req.uri or "/") + if sn is None: + self.send_response('404 Not Found', [], ['']) + else: + app = cherrypy.tree.apps[sn] + method = req.method + path = req.path + qs = req.qs or "" + headers = 
req.inheaders.items() + rfile = req.rfile + prev = None + + try: + redirections = [] + while True: + request, response = app.get_serving( + local, remote, scheme, "HTTP/1.1") + request.multithread = True + request.multiprocess = False + request.app = app + request.prev = prev + + # Run the CherryPy Request object and obtain the + # response + try: + request.run(method, path, qs, + req.request_protocol, headers, rfile) + break + except cherrypy.InternalRedirect: + ir = sys.exc_info()[1] + app.release_serving() + prev = request + + if not self.recursive: + if ir.path in redirections: + raise RuntimeError( + "InternalRedirector visited the same " + "URL twice: %r" % ir.path) + else: + # Add the *previous* path_info + qs to + # redirections. + if qs: + qs = "?" + qs + redirections.append(sn + path + qs) + + # Munge environment and try again. + method = "GET" + path = ir.path + qs = ir.query_string + rfile = io.BytesIO() + + self.send_response( + response.output_status, response.header_list, + response.body) + finally: + app.release_serving() + except: + tb = format_exc() + # print tb + cherrypy.log(tb, 'NATIVE_ADAPTER', severity=logging.ERROR) + s, h, b = bare_error() + self.send_response(s, h, b) + + def send_response(self, status, headers, body): + req = self.req + + # Set response status + req.status = str(status or "500 Server Error") + + # Set response headers + for header, value in headers: + req.outheaders.append((header, value)) + if (req.ready and not req.sent_headers): + req.sent_headers = True + req.send_headers() + + # Set response body + for seg in body: + req.write(seg) + + +class CPHTTPServer(wsgiserver.HTTPServer): + + """Wrapper for wsgiserver.HTTPServer. + + wsgiserver has been designed to not reference CherryPy in any way, + so that it can be used in other frameworks and applications. + Therefore, we wrap it here, so we can apply some attributes + from config -> cherrypy.server -> HTTPServer. 
+ """ + + def __init__(self, server_adapter=cherrypy.server): + self.server_adapter = server_adapter + + server_name = (self.server_adapter.socket_host or + self.server_adapter.socket_file or + None) + + wsgiserver.HTTPServer.__init__( + self, server_adapter.bind_addr, NativeGateway, + minthreads=server_adapter.thread_pool, + maxthreads=server_adapter.thread_pool_max, + server_name=server_name) + + self.max_request_header_size = ( + self.server_adapter.max_request_header_size or 0) + self.max_request_body_size = ( + self.server_adapter.max_request_body_size or 0) + self.request_queue_size = self.server_adapter.socket_queue_size + self.timeout = self.server_adapter.socket_timeout + self.shutdown_timeout = self.server_adapter.shutdown_timeout + self.protocol = self.server_adapter.protocol_version + self.nodelay = self.server_adapter.nodelay + + ssl_module = self.server_adapter.ssl_module or 'pyopenssl' + if self.server_adapter.ssl_context: + adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module) + self.ssl_adapter = adapter_class( + self.server_adapter.ssl_certificate, + self.server_adapter.ssl_private_key, + self.server_adapter.ssl_certificate_chain) + self.ssl_adapter.context = self.server_adapter.ssl_context + elif self.server_adapter.ssl_certificate: + adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module) + self.ssl_adapter = adapter_class( + self.server_adapter.ssl_certificate, + self.server_adapter.ssl_private_key, + self.server_adapter.ssl_certificate_chain) diff --git a/deps/cherrypy/_cpreqbody.py b/deps/cherrypy/_cpreqbody.py new file mode 100644 index 00000000..a55e0abc --- /dev/null +++ b/deps/cherrypy/_cpreqbody.py @@ -0,0 +1,1018 @@ +"""Request body processing for CherryPy. + +.. versionadded:: 3.2 + +Application authors have complete control over the parsing of HTTP request +entities. In short, +:attr:`cherrypy.request.body` +is now always set to an instance of +:class:`RequestBody`, +and *that* class is a subclass of :class:`Entity`. 
+ +When an HTTP request includes an entity body, it is often desirable to +provide that information to applications in a form other than the raw bytes. +Different content types demand different approaches. Examples: + + * For a GIF file, we want the raw bytes in a stream. + * An HTML form is better parsed into its component fields, and each text field + decoded from bytes to unicode. + * A JSON body should be deserialized into a Python dict or list. + +When the request contains a Content-Type header, the media type is used as a +key to look up a value in the +:attr:`request.body.processors` dict. +If the full media +type is not found, then the major type is tried; for example, if no processor +is found for the 'image/jpeg' type, then we look for a processor for the +'image' types altogether. If neither the full type nor the major type has a +matching processor, then a default processor is used +(:func:`default_proc`). For most +types, this means no processing is done, and the body is left unread as a +raw byte stream. Processors are configurable in an 'on_start_resource' hook. + +Some processors, especially those for the 'text' types, attempt to decode bytes +to unicode. If the Content-Type request header includes a 'charset' parameter, +this is used to decode the entity. Otherwise, one or more default charsets may +be attempted, although this decision is up to each processor. If a processor +successfully decodes an Entity or Part, it should set the +:attr:`charset` attribute +on the Entity or Part to the name of the successful charset, so that +applications can easily re-encode or transcode the value if they wish. + +If the Content-Type of the request entity is of major type 'multipart', then +the above parsing process, and possibly a decoding process, is performed for +each part. + +For both the full entity and multipart parts, a Content-Disposition header may +be used to fill :attr:`name` and +:attr:`filename` attributes on the +request.body or the Part. + +.. 
_custombodyprocessors: + +Custom Processors +================= + +You can add your own processors for any specific or major MIME type. Simply add +it to the :attr:`processors` dict in a +hook/tool that runs at ``on_start_resource`` or ``before_request_body``. +Here's the built-in JSON tool for an example:: + + def json_in(force=True, debug=False): + request = cherrypy.serving.request + def json_processor(entity): + \"""Read application/json data into request.json.\""" + if not entity.headers.get("Content-Length", ""): + raise cherrypy.HTTPError(411) + + body = entity.fp.read() + try: + request.json = json_decode(body) + except ValueError: + raise cherrypy.HTTPError(400, 'Invalid JSON document') + if force: + request.body.processors.clear() + request.body.default_proc = cherrypy.HTTPError( + 415, 'Expected an application/json content type') + request.body.processors['application/json'] = json_processor + +We begin by defining a new ``json_processor`` function to stick in the +``processors`` dictionary. All processor functions take a single argument, +the ``Entity`` instance they are to process. It will be called whenever a +request is received (for those URI's where the tool is turned on) which +has a ``Content-Type`` of "application/json". + +First, it checks for a valid ``Content-Length`` (raising 411 if not valid), +then reads the remaining bytes on the socket. The ``fp`` object knows its +own length, so it won't hang waiting for data that never arrives. It will +return when all data has been read. Then, we decode those bytes using +Python's built-in ``json`` module, and stick the decoded result onto +``request.json`` . If it cannot be decoded, we raise 400. + +If the "force" argument is True (the default), the ``Tool`` clears the +``processors`` dict so that request entities of other ``Content-Types`` +aren't parsed at all. Since there's no entry for those invalid MIME +types, the ``default_proc`` method of ``cherrypy.request.body`` is +called. 
But this does nothing by default (usually to provide the page +handler an opportunity to handle it.) +But in our case, we want to raise 415, so we replace +``request.body.default_proc`` +with the error (``HTTPError`` instances, when called, raise themselves). + +If we were defining a custom processor, we can do so without making a ``Tool``. +Just add the config entry:: + + request.body.processors = {'application/json': json_processor} + +Note that you can only replace the ``processors`` dict wholesale this way, +not update the existing one. +""" + +try: + from io import DEFAULT_BUFFER_SIZE +except ImportError: + DEFAULT_BUFFER_SIZE = 8192 +import re +import sys +import tempfile +try: + from urllib import unquote_plus +except ImportError: + def unquote_plus(bs): + """Bytes version of urllib.parse.unquote_plus.""" + bs = bs.replace(ntob('+'), ntob(' ')) + atoms = bs.split(ntob('%')) + for i in range(1, len(atoms)): + item = atoms[i] + try: + pct = int(item[:2], 16) + atoms[i] = bytes([pct]) + item[2:] + except ValueError: + pass + return ntob('').join(atoms) + +import cherrypy +from cherrypy._cpcompat import text_or_bytes, ntob, ntou +from cherrypy.lib import httputil + + +# ------------------------------- Processors -------------------------------- # + +def process_urlencoded(entity): + """Read application/x-www-form-urlencoded data into entity.params.""" + qs = entity.fp.read() + for charset in entity.attempt_charsets: + try: + params = {} + for aparam in qs.split(ntob('&')): + for pair in aparam.split(ntob(';')): + if not pair: + continue + + atoms = pair.split(ntob('='), 1) + if len(atoms) == 1: + atoms.append(ntob('')) + + key = unquote_plus(atoms[0]).decode(charset) + value = unquote_plus(atoms[1]).decode(charset) + + if key in params: + if not isinstance(params[key], list): + params[key] = [params[key]] + params[key].append(value) + else: + params[key] = value + except UnicodeDecodeError: + pass + else: + entity.charset = charset + break + else: + raise 
cherrypy.HTTPError( + 400, "The request entity could not be decoded. The following " + "charsets were attempted: %s" % repr(entity.attempt_charsets)) + + # Now that all values have been successfully parsed and decoded, + # apply them to the entity.params dict. + for key, value in params.items(): + if key in entity.params: + if not isinstance(entity.params[key], list): + entity.params[key] = [entity.params[key]] + entity.params[key].append(value) + else: + entity.params[key] = value + + +def process_multipart(entity): + """Read all multipart parts into entity.parts.""" + ib = "" + if 'boundary' in entity.content_type.params: + # http://tools.ietf.org/html/rfc2046#section-5.1.1 + # "The grammar for parameters on the Content-type field is such that it + # is often necessary to enclose the boundary parameter values in quotes + # on the Content-type line" + ib = entity.content_type.params['boundary'].strip('"') + + if not re.match("^[ -~]{0,200}[!-~]$", ib): + raise ValueError('Invalid boundary in multipart form: %r' % (ib,)) + + ib = ('--' + ib).encode('ascii') + + # Find the first marker + while True: + b = entity.readline() + if not b: + return + + b = b.strip() + if b == ib: + break + + # Read all parts + while True: + part = entity.part_class.from_fp(entity.fp, ib) + entity.parts.append(part) + part.process() + if part.fp.done: + break + + +def process_multipart_form_data(entity): + """Read all multipart/form-data parts into entity.parts or entity.params. + """ + process_multipart(entity) + + kept_parts = [] + for part in entity.parts: + if part.name is None: + kept_parts.append(part) + else: + if part.filename is None: + # It's a regular field + value = part.fullvalue() + else: + # It's a file upload. Retain the whole part so consumer code + # has access to its .file and .filename attributes. 
+ value = part + + if part.name in entity.params: + if not isinstance(entity.params[part.name], list): + entity.params[part.name] = [entity.params[part.name]] + entity.params[part.name].append(value) + else: + entity.params[part.name] = value + + entity.parts = kept_parts + + +def _old_process_multipart(entity): + """The behavior of 3.2 and lower. Deprecated and will be changed in 3.3.""" + process_multipart(entity) + + params = entity.params + + for part in entity.parts: + if part.name is None: + key = ntou('parts') + else: + key = part.name + + if part.filename is None: + # It's a regular field + value = part.fullvalue() + else: + # It's a file upload. Retain the whole part so consumer code + # has access to its .file and .filename attributes. + value = part + + if key in params: + if not isinstance(params[key], list): + params[key] = [params[key]] + params[key].append(value) + else: + params[key] = value + + +# -------------------------------- Entities --------------------------------- # +class Entity(object): + + """An HTTP request body, or MIME multipart body. + + This class collects information about the HTTP request entity. When a + given entity is of MIME type "multipart", each part is parsed into its own + Entity instance, and the set of parts stored in + :attr:`entity.parts`. + + Between the ``before_request_body`` and ``before_handler`` tools, CherryPy + tries to process the request body (if any) by calling + :func:`request.body.process`. + This uses the ``content_type`` of the Entity to look up a suitable + processor in + :attr:`Entity.processors`, + a dict. + If a matching processor cannot be found for the complete Content-Type, + it tries again using the major type. For example, if a request with an + entity of type "image/jpeg" arrives, but no processor can be found for + that complete type, then one is sought for the major type "image". 
If a + processor is still not found, then the + :func:`default_proc` method + of the Entity is called (which does nothing by default; you can + override this too). + + CherryPy includes processors for the "application/x-www-form-urlencoded" + type, the "multipart/form-data" type, and the "multipart" major type. + CherryPy 3.2 processes these types almost exactly as older versions. + Parts are passed as arguments to the page handler using their + ``Content-Disposition.name`` if given, otherwise in a generic "parts" + argument. Each such part is either a string, or the + :class:`Part` itself if it's a file. (In this + case it will have ``file`` and ``filename`` attributes, or possibly a + ``value`` attribute). Each Part is itself a subclass of + Entity, and has its own ``process`` method and ``processors`` dict. + + There is a separate processor for the "multipart" major type which is more + flexible, and simply stores all multipart parts in + :attr:`request.body.parts`. You can + enable it with:: + + cherrypy.request.body.processors['multipart'] = _cpreqbody.process_multipart + + in an ``on_start_resource`` tool. + """ + + # http://tools.ietf.org/html/rfc2046#section-4.1.2: + # "The default character set, which must be assumed in the + # absence of a charset parameter, is US-ASCII." + # However, many browsers send data in utf-8 with no charset. + attempt_charsets = ['utf-8'] + """A list of strings, each of which should be a known encoding. + + When the Content-Type of the request body warrants it, each of the given + encodings will be tried in order. The first one to successfully decode the + entity without raising an error is stored as + :attr:`entity.charset`. This defaults + to ``['utf-8']`` (plus 'ISO-8859-1' for "text/\*" types, as required by + `HTTP/1.1 `_), + but ``['us-ascii', 'utf-8']`` for multipart parts. 
+ """ + + charset = None + """The successful decoding; see "attempt_charsets" above.""" + + content_type = None + """The value of the Content-Type request header. + + If the Entity is part of a multipart payload, this will be the Content-Type + given in the MIME headers for this part. + """ + + default_content_type = 'application/x-www-form-urlencoded' + """This defines a default ``Content-Type`` to use if no Content-Type header + is given. The empty string is used for RequestBody, which results in the + request body not being read or parsed at all. This is by design; a missing + ``Content-Type`` header in the HTTP request entity is an error at best, + and a security hole at worst. For multipart parts, however, the MIME spec + declares that a part with no Content-Type defaults to "text/plain" + (see :class:`Part`). + """ + + filename = None + """The ``Content-Disposition.filename`` header, if available.""" + + fp = None + """The readable socket file object.""" + + headers = None + """A dict of request/multipart header names and values. + + This is a copy of the ``request.headers`` for the ``request.body``; + for multipart parts, it is the set of headers for that part. + """ + + length = None + """The value of the ``Content-Length`` header, if provided.""" + + name = None + """The "name" parameter of the ``Content-Disposition`` header, if any.""" + + params = None + """ + If the request Content-Type is 'application/x-www-form-urlencoded' or + multipart, this will be a dict of the params pulled from the entity + body; that is, it will be the portion of request.params that come + from the message body (sometimes called "POST params", although they + can be sent with various HTTP method verbs). 
This value is set between + the 'before_request_body' and 'before_handler' hooks (assuming that + process_request_body is True).""" + + processors = {'application/x-www-form-urlencoded': process_urlencoded, + 'multipart/form-data': process_multipart_form_data, + 'multipart': process_multipart, + } + """A dict of Content-Type names to processor methods.""" + + parts = None + """A list of Part instances if ``Content-Type`` is of major type + "multipart".""" + + part_class = None + """The class used for multipart parts. + + You can replace this with custom subclasses to alter the processing of + multipart parts. + """ + + def __init__(self, fp, headers, params=None, parts=None): + # Make an instance-specific copy of the class processors + # so Tools, etc. can replace them per-request. + self.processors = self.processors.copy() + + self.fp = fp + self.headers = headers + + if params is None: + params = {} + self.params = params + + if parts is None: + parts = [] + self.parts = parts + + # Content-Type + self.content_type = headers.elements('Content-Type') + if self.content_type: + self.content_type = self.content_type[0] + else: + self.content_type = httputil.HeaderElement.from_str( + self.default_content_type) + + # Copy the class 'attempt_charsets', prepending any Content-Type + # charset + dec = self.content_type.params.get("charset", None) + if dec: + self.attempt_charsets = [dec] + [c for c in self.attempt_charsets + if c != dec] + else: + self.attempt_charsets = self.attempt_charsets[:] + + # Length + self.length = None + clen = headers.get('Content-Length', None) + # If Transfer-Encoding is 'chunked', ignore any Content-Length. 
+ if ( + clen is not None and + 'chunked' not in headers.get('Transfer-Encoding', '') + ): + try: + self.length = int(clen) + except ValueError: + pass + + # Content-Disposition + self.name = None + self.filename = None + disp = headers.elements('Content-Disposition') + if disp: + disp = disp[0] + if 'name' in disp.params: + self.name = disp.params['name'] + if self.name.startswith('"') and self.name.endswith('"'): + self.name = self.name[1:-1] + if 'filename' in disp.params: + self.filename = disp.params['filename'] + if ( + self.filename.startswith('"') and + self.filename.endswith('"') + ): + self.filename = self.filename[1:-1] + + # The 'type' attribute is deprecated in 3.2; remove it in 3.3. + type = property( + lambda self: self.content_type, + doc="A deprecated alias for " + ":attr:`content_type`." + ) + + def read(self, size=None, fp_out=None): + return self.fp.read(size, fp_out) + + def readline(self, size=None): + return self.fp.readline(size) + + def readlines(self, sizehint=None): + return self.fp.readlines(sizehint) + + def __iter__(self): + return self + + def __next__(self): + line = self.readline() + if not line: + raise StopIteration + return line + + def next(self): + return self.__next__() + + def read_into_file(self, fp_out=None): + """Read the request body into fp_out (or make_file() if None). + + Return fp_out. + """ + if fp_out is None: + fp_out = self.make_file() + self.read(fp_out=fp_out) + return fp_out + + def make_file(self): + """Return a file-like object into which the request body will be read. + + By default, this will return a TemporaryFile. Override as needed. + See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`.""" + return tempfile.TemporaryFile() + + def fullvalue(self): + """Return this entity as a string, whether stored in a file or not.""" + if self.file: + # It was stored in a tempfile. Read it. 
+ self.file.seek(0) + value = self.file.read() + self.file.seek(0) + else: + value = self.value + value = self.decode_entity(value) + return value + + def decode_entity(self , value): + """Return a given byte encoded value as a string""" + for charset in self.attempt_charsets: + try: + value = value.decode(charset) + except UnicodeDecodeError: + pass + else: + self.charset = charset + return value + else: + raise cherrypy.HTTPError( + 400, + "The request entity could not be decoded. The following " + "charsets were attempted: %s" % repr(self.attempt_charsets) + ) + + def process(self): + """Execute the best-match processor for the given media type.""" + proc = None + ct = self.content_type.value + try: + proc = self.processors[ct] + except KeyError: + toptype = ct.split('/', 1)[0] + try: + proc = self.processors[toptype] + except KeyError: + pass + if proc is None: + self.default_proc() + else: + proc(self) + + def default_proc(self): + """Called if a more-specific processor is not found for the + ``Content-Type``. + """ + # Leave the fp alone for someone else to read. This works fine + # for request.body, but the Part subclasses need to override this + # so they can move on to the next part. + pass + + +class Part(Entity): + + """A MIME part entity, part of a multipart entity.""" + + # "The default character set, which must be assumed in the absence of a + # charset parameter, is US-ASCII." + attempt_charsets = ['us-ascii', 'utf-8'] + """A list of strings, each of which should be a known encoding. + + When the Content-Type of the request body warrants it, each of the given + encodings will be tried in order. The first one to successfully decode the + entity without raising an error is stored as + :attr:`entity.charset`. This defaults + to ``['utf-8']`` (plus 'ISO-8859-1' for "text/\*" types, as required by + `HTTP/1.1 `_), + but ``['us-ascii', 'utf-8']`` for multipart parts. 
+ """ + + boundary = None + """The MIME multipart boundary.""" + + default_content_type = 'text/plain' + """This defines a default ``Content-Type`` to use if no Content-Type header + is given. The empty string is used for RequestBody, which results in the + request body not being read or parsed at all. This is by design; a missing + ``Content-Type`` header in the HTTP request entity is an error at best, + and a security hole at worst. For multipart parts, however (this class), + the MIME spec declares that a part with no Content-Type defaults to + "text/plain". + """ + + # This is the default in stdlib cgi. We may want to increase it. + maxrambytes = 1000 + """The threshold of bytes after which point the ``Part`` will store + its data in a file (generated by + :func:`make_file`) + instead of a string. Defaults to 1000, just like the :mod:`cgi` + module in Python's standard library. + """ + + def __init__(self, fp, headers, boundary): + Entity.__init__(self, fp, headers) + self.boundary = boundary + self.file = None + self.value = None + + @classmethod + def from_fp(cls, fp, boundary): + headers = cls.read_headers(fp) + return cls(fp, headers, boundary) + + @classmethod + def read_headers(cls, fp): + headers = httputil.HeaderMap() + while True: + line = fp.readline() + if not line: + # No more data--illegal end of headers + raise EOFError("Illegal end of headers.") + + if line == ntob('\r\n'): + # Normal end of headers + break + if not line.endswith(ntob('\r\n')): + raise ValueError("MIME requires CRLF terminators: %r" % line) + + if line[0] in ntob(' \t'): + # It's a continuation line. + v = line.strip().decode('ISO-8859-1') + else: + k, v = line.split(ntob(":"), 1) + k = k.strip().decode('ISO-8859-1') + v = v.strip().decode('ISO-8859-1') + + existing = headers.get(k) + if existing: + v = ", ".join((existing, v)) + headers[k] = v + + return headers + + def read_lines_to_boundary(self, fp_out=None): + """Read bytes from self.fp and return or write them to a file. 
+ + If the 'fp_out' argument is None (the default), all bytes read are + returned in a single byte string. + + If the 'fp_out' argument is not None, it must be a file-like + object that supports the 'write' method; all bytes read will be + written to the fp, and that fp is returned. + """ + endmarker = self.boundary + ntob("--") + delim = ntob("") + prev_lf = True + lines = [] + seen = 0 + while True: + line = self.fp.readline(1 << 16) + if not line: + raise EOFError("Illegal end of multipart body.") + if line.startswith(ntob("--")) and prev_lf: + strippedline = line.strip() + if strippedline == self.boundary: + break + if strippedline == endmarker: + self.fp.finish() + break + + line = delim + line + + if line.endswith(ntob("\r\n")): + delim = ntob("\r\n") + line = line[:-2] + prev_lf = True + elif line.endswith(ntob("\n")): + delim = ntob("\n") + line = line[:-1] + prev_lf = True + else: + delim = ntob("") + prev_lf = False + + if fp_out is None: + lines.append(line) + seen += len(line) + if seen > self.maxrambytes: + fp_out = self.make_file() + for line in lines: + fp_out.write(line) + else: + fp_out.write(line) + + if fp_out is None: + result = ntob('').join(lines) + return result + else: + fp_out.seek(0) + return fp_out + + def default_proc(self): + """Called if a more-specific processor is not found for the + ``Content-Type``. + """ + if self.filename: + # Always read into a file if a .filename was given. + self.file = self.read_into_file() + else: + result = self.read_lines_to_boundary() + if isinstance(result, text_or_bytes): + self.value = result + else: + self.file = result + + def read_into_file(self, fp_out=None): + """Read the request body into fp_out (or make_file() if None). + + Return fp_out. 
+ """ + if fp_out is None: + fp_out = self.make_file() + self.read_lines_to_boundary(fp_out=fp_out) + return fp_out + +Entity.part_class = Part + +try: + inf = float('inf') +except ValueError: + # Python 2.4 and lower + class Infinity(object): + + def __cmp__(self, other): + return 1 + + def __sub__(self, other): + return self + inf = Infinity() + + +comma_separated_headers = [ + 'Accept', 'Accept-Charset', 'Accept-Encoding', + 'Accept-Language', 'Accept-Ranges', 'Allow', + 'Cache-Control', 'Connection', 'Content-Encoding', + 'Content-Language', 'Expect', 'If-Match', + 'If-None-Match', 'Pragma', 'Proxy-Authenticate', + 'Te', 'Trailer', 'Transfer-Encoding', 'Upgrade', + 'Vary', 'Via', 'Warning', 'Www-Authenticate' +] + + +class SizedReader: + + def __init__(self, fp, length, maxbytes, bufsize=DEFAULT_BUFFER_SIZE, + has_trailers=False): + # Wrap our fp in a buffer so peek() works + self.fp = fp + self.length = length + self.maxbytes = maxbytes + self.buffer = ntob('') + self.bufsize = bufsize + self.bytes_read = 0 + self.done = False + self.has_trailers = has_trailers + + def read(self, size=None, fp_out=None): + """Read bytes from the request body and return or write them to a file. + + A number of bytes less than or equal to the 'size' argument are read + off the socket. The actual number of bytes read are tracked in + self.bytes_read. The number may be smaller than 'size' when 1) the + client sends fewer bytes, 2) the 'Content-Length' request header + specifies fewer bytes than requested, or 3) the number of bytes read + exceeds self.maxbytes (in which case, 413 is raised). + + If the 'fp_out' argument is None (the default), all bytes read are + returned in a single byte string. + + If the 'fp_out' argument is not None, it must be a file-like + object that supports the 'write' method; all bytes read will be + written to the fp, and None is returned. 
+ """ + + if self.length is None: + if size is None: + remaining = inf + else: + remaining = size + else: + remaining = self.length - self.bytes_read + if size and size < remaining: + remaining = size + if remaining == 0: + self.finish() + if fp_out is None: + return ntob('') + else: + return None + + chunks = [] + + # Read bytes from the buffer. + if self.buffer: + if remaining is inf: + data = self.buffer + self.buffer = ntob('') + else: + data = self.buffer[:remaining] + self.buffer = self.buffer[remaining:] + datalen = len(data) + remaining -= datalen + + # Check lengths. + self.bytes_read += datalen + if self.maxbytes and self.bytes_read > self.maxbytes: + raise cherrypy.HTTPError(413) + + # Store the data. + if fp_out is None: + chunks.append(data) + else: + fp_out.write(data) + + # Read bytes from the socket. + while remaining > 0: + chunksize = min(remaining, self.bufsize) + try: + data = self.fp.read(chunksize) + except Exception: + e = sys.exc_info()[1] + if e.__class__.__name__ == 'MaxSizeExceeded': + # Post data is too big + raise cherrypy.HTTPError( + 413, "Maximum request length: %r" % e.args[1]) + else: + raise + if not data: + self.finish() + break + datalen = len(data) + remaining -= datalen + + # Check lengths. + self.bytes_read += datalen + if self.maxbytes and self.bytes_read > self.maxbytes: + raise cherrypy.HTTPError(413) + + # Store the data. 
+ if fp_out is None: + chunks.append(data) + else: + fp_out.write(data) + + if fp_out is None: + return ntob('').join(chunks) + + def readline(self, size=None): + """Read a line from the request body and return it.""" + chunks = [] + while size is None or size > 0: + chunksize = self.bufsize + if size is not None and size < self.bufsize: + chunksize = size + data = self.read(chunksize) + if not data: + break + pos = data.find(ntob('\n')) + 1 + if pos: + chunks.append(data[:pos]) + remainder = data[pos:] + self.buffer += remainder + self.bytes_read -= len(remainder) + break + else: + chunks.append(data) + return ntob('').join(chunks) + + def readlines(self, sizehint=None): + """Read lines from the request body and return them.""" + if self.length is not None: + if sizehint is None: + sizehint = self.length - self.bytes_read + else: + sizehint = min(sizehint, self.length - self.bytes_read) + + lines = [] + seen = 0 + while True: + line = self.readline() + if not line: + break + lines.append(line) + seen += len(line) + if seen >= sizehint: + break + return lines + + def finish(self): + self.done = True + if self.has_trailers and hasattr(self.fp, 'read_trailer_lines'): + self.trailers = {} + + try: + for line in self.fp.read_trailer_lines(): + if line[0] in ntob(' \t'): + # It's a continuation line. 
+ v = line.strip() + else: + try: + k, v = line.split(ntob(":"), 1) + except ValueError: + raise ValueError("Illegal header line.") + k = k.strip().title() + v = v.strip() + + if k in comma_separated_headers: + existing = self.trailers.get(envname) + if existing: + v = ntob(", ").join((existing, v)) + self.trailers[k] = v + except Exception: + e = sys.exc_info()[1] + if e.__class__.__name__ == 'MaxSizeExceeded': + # Post data is too big + raise cherrypy.HTTPError( + 413, "Maximum request length: %r" % e.args[1]) + else: + raise + + +class RequestBody(Entity): + + """The entity of the HTTP request.""" + + bufsize = 8 * 1024 + """The buffer size used when reading the socket.""" + + # Don't parse the request body at all if the client didn't provide + # a Content-Type header. See + # https://github.com/cherrypy/cherrypy/issues/790 + default_content_type = '' + """This defines a default ``Content-Type`` to use if no Content-Type header + is given. The empty string is used for RequestBody, which results in the + request body not being read or parsed at all. This is by design; a missing + ``Content-Type`` header in the HTTP request entity is an error at best, + and a security hole at worst. For multipart parts, however, the MIME spec + declares that a part with no Content-Type defaults to "text/plain" + (see :class:`Part`). + """ + + maxbytes = None + """Raise ``MaxSizeExceeded`` if more bytes than this are read from + the socket. + """ + + def __init__(self, fp, headers, params=None, request_params=None): + Entity.__init__(self, fp, headers, params) + + # http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1 + # When no explicit charset parameter is provided by the + # sender, media subtypes of the "text" type are defined + # to have a default charset value of "ISO-8859-1" when + # received via HTTP. 
+ if self.content_type.value.startswith('text/'): + for c in ('ISO-8859-1', 'iso-8859-1', 'Latin-1', 'latin-1'): + if c in self.attempt_charsets: + break + else: + self.attempt_charsets.append('ISO-8859-1') + + # Temporary fix while deprecating passing .parts as .params. + self.processors['multipart'] = _old_process_multipart + + if request_params is None: + request_params = {} + self.request_params = request_params + + def process(self): + """Process the request entity based on its Content-Type.""" + # "The presence of a message-body in a request is signaled by the + # inclusion of a Content-Length or Transfer-Encoding header field in + # the request's message-headers." + # It is possible to send a POST request with no body, for example; + # however, app developers are responsible in that case to set + # cherrypy.request.process_body to False so this method isn't called. + h = cherrypy.serving.request.headers + if 'Content-Length' not in h and 'Transfer-Encoding' not in h: + raise cherrypy.HTTPError(411) + + self.fp = SizedReader(self.fp, self.length, + self.maxbytes, bufsize=self.bufsize, + has_trailers='Trailer' in h) + super(RequestBody, self).process() + + # Body params should also be a part of the request_params + # add them in here. + request_params = self.request_params + for key, value in self.params.items(): + # Python 2 only: keyword arguments must be byte strings (type + # 'str'). 
+ if sys.version_info < (3, 0): + if isinstance(key, unicode): + key = key.encode('ISO-8859-1') + + if key in request_params: + if not isinstance(request_params[key], list): + request_params[key] = [request_params[key]] + request_params[key].append(value) + else: + request_params[key] = value diff --git a/deps/cherrypy/_cprequest.py b/deps/cherrypy/_cprequest.py new file mode 100644 index 00000000..81771497 --- /dev/null +++ b/deps/cherrypy/_cprequest.py @@ -0,0 +1,973 @@ +import sys +import time +import warnings + +import six + +import cherrypy +from cherrypy._cpcompat import text_or_bytes, copykeys, ntob +from cherrypy._cpcompat import SimpleCookie, CookieError +from cherrypy import _cpreqbody, _cpconfig +from cherrypy._cperror import format_exc, bare_error +from cherrypy.lib import httputil, file_generator + + +class Hook(object): + + """A callback and its metadata: failsafe, priority, and kwargs.""" + + callback = None + """ + The bare callable that this Hook object is wrapping, which will + be called when the Hook is called.""" + + failsafe = False + """ + If True, the callback is guaranteed to run even if other callbacks + from the same call point raise exceptions.""" + + priority = 50 + """ + Defines the order of execution for a list of Hooks. 
Priority numbers + should be limited to the closed interval [0, 100], but values outside + this range are acceptable, as are fractional values.""" + + kwargs = {} + """ + A set of keyword arguments that will be passed to the + callable on each call.""" + + def __init__(self, callback, failsafe=None, priority=None, **kwargs): + self.callback = callback + + if failsafe is None: + failsafe = getattr(callback, "failsafe", False) + self.failsafe = failsafe + + if priority is None: + priority = getattr(callback, "priority", 50) + self.priority = priority + + self.kwargs = kwargs + + def __lt__(self, other): + # Python 3 + return self.priority < other.priority + + def __cmp__(self, other): + # Python 2 + return cmp(self.priority, other.priority) + + def __call__(self): + """Run self.callback(**self.kwargs).""" + return self.callback(**self.kwargs) + + def __repr__(self): + cls = self.__class__ + return ("%s.%s(callback=%r, failsafe=%r, priority=%r, %s)" + % (cls.__module__, cls.__name__, self.callback, + self.failsafe, self.priority, + ", ".join(['%s=%r' % (k, v) + for k, v in self.kwargs.items()]))) + + +class HookMap(dict): + + """A map of call points to lists of callbacks (Hook objects).""" + + def __new__(cls, points=None): + d = dict.__new__(cls) + for p in points or []: + d[p] = [] + return d + + def __init__(self, *a, **kw): + pass + + def attach(self, point, callback, failsafe=None, priority=None, **kwargs): + """Append a new Hook made from the supplied arguments.""" + self[point].append(Hook(callback, failsafe, priority, **kwargs)) + + def run(self, point): + """Execute all registered Hooks (callbacks) for the given point.""" + exc = None + hooks = self[point] + hooks.sort() + for hook in hooks: + # Some hooks are guaranteed to run even if others at + # the same hookpoint fail. We will still log the failure, + # but proceed on to the next hook. The only way + # to stop all processing from one of these hooks is + # to raise SystemExit and stop the whole server. 
+ if exc is None or hook.failsafe: + try: + hook() + except (KeyboardInterrupt, SystemExit): + raise + except (cherrypy.HTTPError, cherrypy.HTTPRedirect, + cherrypy.InternalRedirect): + exc = sys.exc_info()[1] + except: + exc = sys.exc_info()[1] + cherrypy.log(traceback=True, severity=40) + if exc: + raise exc + + def __copy__(self): + newmap = self.__class__() + # We can't just use 'update' because we want copies of the + # mutable values (each is a list) as well. + for k, v in self.items(): + newmap[k] = v[:] + return newmap + copy = __copy__ + + def __repr__(self): + cls = self.__class__ + return "%s.%s(points=%r)" % ( + cls.__module__, + cls.__name__, + copykeys(self) + ) + + +# Config namespace handlers + +def hooks_namespace(k, v): + """Attach bare hooks declared in config.""" + # Use split again to allow multiple hooks for a single + # hookpoint per path (e.g. "hooks.before_handler.1"). + # Little-known fact you only get from reading source ;) + hookpoint = k.split(".", 1)[0] + if isinstance(v, text_or_bytes): + v = cherrypy.lib.attributes(v) + if not isinstance(v, Hook): + v = Hook(v) + cherrypy.serving.request.hooks[hookpoint].append(v) + + +def request_namespace(k, v): + """Attach request attributes declared in config.""" + # Provides config entries to set request.body attrs (like + # attempt_charsets). 
+ if k[:5] == 'body.': + setattr(cherrypy.serving.request.body, k[5:], v) + else: + setattr(cherrypy.serving.request, k, v) + + +def response_namespace(k, v): + """Attach response attributes declared in config.""" + # Provides config entries to set default response headers + # http://cherrypy.org/ticket/889 + if k[:8] == 'headers.': + cherrypy.serving.response.headers[k.split('.', 1)[1]] = v + else: + setattr(cherrypy.serving.response, k, v) + + +def error_page_namespace(k, v): + """Attach error pages declared in config.""" + if k != 'default': + k = int(k) + cherrypy.serving.request.error_page[k] = v + + +hookpoints = ['on_start_resource', 'before_request_body', + 'before_handler', 'before_finalize', + 'on_end_resource', 'on_end_request', + 'before_error_response', 'after_error_response'] + + +class Request(object): + + """An HTTP request. + + This object represents the metadata of an HTTP request message; + that is, it contains attributes which describe the environment + in which the request URL, headers, and body were sent (if you + want tools to interpret the headers and body, those are elsewhere, + mostly in Tools). This 'metadata' consists of socket data, + transport characteristics, and the Request-Line. This object + also contains data regarding the configuration in effect for + the given URL, and the execution plan for generating a response. + """ + + prev = None + """ + The previous Request object (if any). This should be None + unless we are processing an InternalRedirect.""" + + # Conversation/connection attributes + local = httputil.Host("127.0.0.1", 80) + "An httputil.Host(ip, port, hostname) object for the server socket." + + remote = httputil.Host("127.0.0.1", 1111) + "An httputil.Host(ip, port, hostname) object for the client socket." + + scheme = "http" + """ + The protocol used between client and server. 
In most cases, + this will be either 'http' or 'https'.""" + + server_protocol = "HTTP/1.1" + """ + The HTTP version for which the HTTP server is at least + conditionally compliant.""" + + base = "" + """The (scheme://host) portion of the requested URL. + In some cases (e.g. when proxying via mod_rewrite), this may contain + path segments which cherrypy.url uses when constructing url's, but + which otherwise are ignored by CherryPy. Regardless, this value + MUST NOT end in a slash.""" + + # Request-Line attributes + request_line = "" + """ + The complete Request-Line received from the client. This is a + single string consisting of the request method, URI, and protocol + version (joined by spaces). Any final CRLF is removed.""" + + method = "GET" + """ + Indicates the HTTP method to be performed on the resource identified + by the Request-URI. Common methods include GET, HEAD, POST, PUT, and + DELETE. CherryPy allows any extension method; however, various HTTP + servers and gateways may restrict the set of allowable methods. + CherryPy applications SHOULD restrict the set (on a per-URI basis).""" + + query_string = "" + """ + The query component of the Request-URI, a string of information to be + interpreted by the resource. The query portion of a URI follows the + path component, and is separated by a '?'. For example, the URI + 'http://www.cherrypy.org/wiki?a=3&b=4' has the query component, + 'a=3&b=4'.""" + + query_string_encoding = 'utf8' + """ + The encoding expected for query string arguments after % HEX HEX decoding). + If a query string is provided that cannot be decoded with this encoding, + 404 is raised (since technically it's a different URI). If you want + arbitrary encodings to not error, set this to 'Latin-1'; you can then + encode back to bytes and re-decode to whatever encoding you like later. + """ + + protocol = (1, 1) + """The HTTP protocol version corresponding to the set + of features which should be allowed in the response. 
If BOTH + the client's request message AND the server's level of HTTP + compliance is HTTP/1.1, this attribute will be the tuple (1, 1). + If either is 1.0, this attribute will be the tuple (1, 0). + Lower HTTP protocol versions are not explicitly supported.""" + + params = {} + """ + A dict which combines query string (GET) and request entity (POST) + variables. This is populated in two stages: GET params are added + before the 'on_start_resource' hook, and POST params are added + between the 'before_request_body' and 'before_handler' hooks.""" + + # Message attributes + header_list = [] + """ + A list of the HTTP request headers as (name, value) tuples. + In general, you should use request.headers (a dict) instead.""" + + headers = httputil.HeaderMap() + """ + A dict-like object containing the request headers. Keys are header + names (in Title-Case format); however, you may get and set them in + a case-insensitive manner. That is, headers['Content-Type'] and + headers['content-type'] refer to the same value. Values are header + values (decoded according to :rfc:`2047` if necessary). See also: + httputil.HeaderMap, httputil.HeaderElement.""" + + cookie = SimpleCookie() + """See help(Cookie).""" + + rfile = None + """ + If the request included an entity (body), it will be available + as a stream in this attribute. However, the rfile will normally + be read for you between the 'before_request_body' hook and the + 'before_handler' hook, and the resulting string is placed into + either request.params or the request.body attribute. + + You may disable the automatic consumption of the rfile by setting + request.process_request_body to False, either in config for the desired + path, or in an 'on_start_resource' or 'before_request_body' hook. + + WARNING: In almost every case, you should not attempt to read from the + rfile stream after CherryPy's automatic mechanism has read it. 
If you + turn off the automatic parsing of rfile, you should read exactly the + number of bytes specified in request.headers['Content-Length']. + Ignoring either of these warnings may result in a hung request thread + or in corruption of the next (pipelined) request. + """ + + process_request_body = True + """ + If True, the rfile (if any) is automatically read and parsed, + and the result placed into request.params or request.body.""" + + methods_with_bodies = ("POST", "PUT") + """ + A sequence of HTTP methods for which CherryPy will automatically + attempt to read a body from the rfile. If you are going to change + this property, modify it on the configuration (recommended) + or on the "hook point" `on_start_resource`. + """ + + body = None + """ + If the request Content-Type is 'application/x-www-form-urlencoded' + or multipart, this will be None. Otherwise, this will be an instance + of :class:`RequestBody` (which you + can .read()); this value is set between the 'before_request_body' and + 'before_handler' hooks (assuming that process_request_body is True).""" + + # Dispatch attributes + dispatch = cherrypy.dispatch.Dispatcher() + """ + The object which looks up the 'page handler' callable and collects + config for the current request based on the path_info, other + request attributes, and the application architecture. The core + calls the dispatcher as early as possible, passing it a 'path_info' + argument. + + The default dispatcher discovers the page handler by matching path_info + to a hierarchical arrangement of objects, starting at request.app.root. + See help(cherrypy.dispatch) for more information.""" + + script_name = "" + """ + The 'mount point' of the application which is handling this request. + + This attribute MUST NOT end in a slash. If the script_name refers to + the root of the URI, it MUST be an empty string (not "/"). + """ + + path_info = "/" + """ + The 'relative path' portion of the Request-URI. 
This is relative + to the script_name ('mount point') of the application which is + handling this request.""" + + login = None + """ + When authentication is used during the request processing this is + set to 'False' if it failed and to the 'username' value if it succeeded. + The default 'None' implies that no authentication happened.""" + + # Note that cherrypy.url uses "if request.app:" to determine whether + # the call is during a real HTTP request or not. So leave this None. + app = None + """The cherrypy.Application object which is handling this request.""" + + handler = None + """ + The function, method, or other callable which CherryPy will call to + produce the response. The discovery of the handler and the arguments + it will receive are determined by the request.dispatch object. + By default, the handler is discovered by walking a tree of objects + starting at request.app.root, and is then passed all HTTP params + (from the query string and POST body) as keyword arguments.""" + + toolmaps = {} + """ + A nested dict of all Toolboxes and Tools in effect for this request, + of the form: {Toolbox.namespace: {Tool.name: config dict}}.""" + + config = None + """ + A flat dict of all configuration entries which apply to the + current request. These entries are collected from global config, + application config (based on request.path_info), and from handler + config (exactly how is governed by the request.dispatch object in + effect for this request; by default, handler config can be attached + anywhere in the tree between request.app.root and the final handler, + and inherits downward).""" + + is_index = None + """ + This will be True if the current request is mapped to an 'index' + resource handler (also, a 'default' handler if path_info ends with + a slash). The value may be used to automatically redirect the + user-agent to a 'more canonical' URL which either adds or removes + the trailing slash. 
See cherrypy.tools.trailing_slash.""" + + hooks = HookMap(hookpoints) + """ + A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}. + Each key is a str naming the hook point, and each value is a list + of hooks which will be called at that hook point during this request. + The list of hooks is generally populated as early as possible (mostly + from Tools specified in config), but may be extended at any time. + See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools.""" + + error_response = cherrypy.HTTPError(500).set_response + """ + The no-arg callable which will handle unexpected, untrapped errors + during request processing. This is not used for expected exceptions + (like NotFound, HTTPError, or HTTPRedirect) which are raised in + response to expected conditions (those should be customized either + via request.error_page or by overriding HTTPError.set_response). + By default, error_response uses HTTPError(500) to return a generic + error response to the user-agent.""" + + error_page = {} + """ + A dict of {error code: response filename or callable} pairs. + + The error code must be an int representing a given HTTP error code, + or the string 'default', which will be used if no matching entry + is found for a given numeric code. + + If a filename is provided, the file should contain a Python string- + formatting template, and can expect by default to receive format + values with the mapping keys %(status)s, %(message)s, %(traceback)s, + and %(version)s. The set of format mappings can be extended by + overriding HTTPError.set_response. + + If a callable is provided, it will be called by default with keyword + arguments 'status', 'message', 'traceback', and 'version', as for a + string-formatting template. The callable must return a string or + iterable of strings which will be set to response.body. It may also + override headers or perform any other processing. 
+ + If no entry is given for an error code, and no 'default' entry exists, + a default template will be used. + """ + + show_tracebacks = True + """ + If True, unexpected errors encountered during request processing will + include a traceback in the response body.""" + + show_mismatched_params = True + """ + If True, mismatched parameters encountered during PageHandler invocation + processing will be included in the response body.""" + + throws = (KeyboardInterrupt, SystemExit, cherrypy.InternalRedirect) + """The sequence of exceptions which Request.run does not trap.""" + + throw_errors = False + """ + If True, Request.run will not trap any errors (except HTTPRedirect and + HTTPError, which are more properly called 'exceptions', not errors).""" + + closed = False + """True once the close method has been called, False otherwise.""" + + stage = None + """ + A string containing the stage reached in the request-handling process. + This is useful when debugging a live server with hung requests.""" + + namespaces = _cpconfig.NamespaceSet( + **{"hooks": hooks_namespace, + "request": request_namespace, + "response": response_namespace, + "error_page": error_page_namespace, + "tools": cherrypy.tools, + }) + + def __init__(self, local_host, remote_host, scheme="http", + server_protocol="HTTP/1.1"): + """Populate a new Request object. + + local_host should be an httputil.Host object with the server info. + remote_host should be an httputil.Host object with the client info. + scheme should be a string, either "http" or "https". + """ + self.local = local_host + self.remote = remote_host + self.scheme = scheme + self.server_protocol = server_protocol + + self.closed = False + + # Put a *copy* of the class error_page into self. + self.error_page = self.error_page.copy() + + # Put a *copy* of the class namespaces into self. + self.namespaces = self.namespaces.copy() + + self.stage = None + + def close(self): + """Run cleanup code. 
(Core)""" + if not self.closed: + self.closed = True + self.stage = 'on_end_request' + self.hooks.run('on_end_request') + self.stage = 'close' + + def run(self, method, path, query_string, req_protocol, headers, rfile): + r"""Process the Request. (Core) + + method, path, query_string, and req_protocol should be pulled directly + from the Request-Line (e.g. "GET /path?key=val HTTP/1.0"). + + path + This should be %XX-unquoted, but query_string should not be. + + When using Python 2, they both MUST be byte strings, + not unicode strings. + + When using Python 3, they both MUST be unicode strings, + not byte strings, and preferably not bytes \x00-\xFF + disguised as unicode. + + headers + A list of (name, value) tuples. + + rfile + A file-like object containing the HTTP request entity. + + When run() is done, the returned object should have 3 attributes: + + * status, e.g. "200 OK" + * header_list, a list of (name, value) tuples + * body, an iterable yielding strings + + Consumer code (HTTP servers) should then access these response + attributes to build the outbound stream. + + """ + response = cherrypy.serving.response + self.stage = 'run' + try: + self.error_response = cherrypy.HTTPError(500).set_response + + self.method = method + path = path or "/" + self.query_string = query_string or '' + self.params = {} + + # Compare request and server HTTP protocol versions, in case our + # server does not support the requested protocol. Limit our output + # to min(req, server). We want the following output: + # request server actual written supported response + # protocol protocol response protocol feature set + # a 1.0 1.0 1.0 1.0 + # b 1.0 1.1 1.1 1.0 + # c 1.1 1.0 1.0 1.0 + # d 1.1 1.1 1.1 1.1 + # Notice that, in (b), the response will be "HTTP/1.1" even though + # the client only understands 1.0. RFC 2616 10.5.6 says we should + # only return 505 if the _major_ version is different. 
+ rp = int(req_protocol[5]), int(req_protocol[7]) + sp = int(self.server_protocol[5]), int(self.server_protocol[7]) + self.protocol = min(rp, sp) + response.headers.protocol = self.protocol + + # Rebuild first line of the request (e.g. "GET /path HTTP/1.0"). + url = path + if query_string: + url += '?' + query_string + self.request_line = '%s %s %s' % (method, url, req_protocol) + + self.header_list = list(headers) + self.headers = httputil.HeaderMap() + + self.rfile = rfile + self.body = None + + self.cookie = SimpleCookie() + self.handler = None + + # path_info should be the path from the + # app root (script_name) to the handler. + self.script_name = self.app.script_name + self.path_info = pi = path[len(self.script_name):] + + self.stage = 'respond' + self.respond(pi) + + except self.throws: + raise + except: + if self.throw_errors: + raise + else: + # Failure in setup, error handler or finalize. Bypass them. + # Can't use handle_error because we may not have hooks yet. + cherrypy.log(traceback=True, severity=40) + if self.show_tracebacks: + body = format_exc() + else: + body = "" + r = bare_error(body) + response.output_status, response.header_list, response.body = r + + if self.method == "HEAD": + # HEAD requests MUST NOT return a message-body in the response. + response.body = [] + + try: + cherrypy.log.access() + except: + cherrypy.log.error(traceback=True) + + if response.timed_out: + raise cherrypy.TimeoutError() + + return response + + # Uncomment for stage debugging + # stage = property(lambda self: self._stage, lambda self, v: print(v)) + + def respond(self, path_info): + """Generate a response for the resource at self.path_info. (Core)""" + response = cherrypy.serving.response + try: + try: + try: + if self.app is None: + raise cherrypy.NotFound() + + # Get the 'Host' header, so we can HTTPRedirect properly. 
+ self.stage = 'process_headers' + self.process_headers() + + # Make a copy of the class hooks + self.hooks = self.__class__.hooks.copy() + self.toolmaps = {} + + self.stage = 'get_resource' + self.get_resource(path_info) + + self.body = _cpreqbody.RequestBody( + self.rfile, self.headers, request_params=self.params) + + self.namespaces(self.config) + + self.stage = 'on_start_resource' + self.hooks.run('on_start_resource') + + # Parse the querystring + self.stage = 'process_query_string' + self.process_query_string() + + # Process the body + if self.process_request_body: + if self.method not in self.methods_with_bodies: + self.process_request_body = False + self.stage = 'before_request_body' + self.hooks.run('before_request_body') + if self.process_request_body: + self.body.process() + + # Run the handler + self.stage = 'before_handler' + self.hooks.run('before_handler') + if self.handler: + self.stage = 'handler' + response.body = self.handler() + + # Finalize + self.stage = 'before_finalize' + self.hooks.run('before_finalize') + response.finalize() + except (cherrypy.HTTPRedirect, cherrypy.HTTPError): + inst = sys.exc_info()[1] + inst.set_response() + self.stage = 'before_finalize (HTTPError)' + self.hooks.run('before_finalize') + response.finalize() + finally: + self.stage = 'on_end_resource' + self.hooks.run('on_end_resource') + except self.throws: + raise + except: + if self.throw_errors: + raise + self.handle_error() + + def process_query_string(self): + """Parse the query string into Python structures. (Core)""" + try: + p = httputil.parse_query_string( + self.query_string, encoding=self.query_string_encoding) + except UnicodeDecodeError: + raise cherrypy.HTTPError( + 404, "The given query string could not be processed. Query " + "strings for this resource must be encoded with %r." % + self.query_string_encoding) + + # Python 2 only: keyword arguments must be byte strings (type 'str'). 
+ if six.PY2: + for key, value in p.items(): + if isinstance(key, six.text_type): + del p[key] + p[key.encode(self.query_string_encoding)] = value + self.params.update(p) + + def process_headers(self): + """Parse HTTP header data into Python structures. (Core)""" + # Process the headers into self.headers + headers = self.headers + for name, value in self.header_list: + # Call title() now (and use dict.__method__(headers)) + # so title doesn't have to be called twice. + name = name.title() + value = value.strip() + + # Warning: if there is more than one header entry for cookies + # (AFAIK, only Konqueror does that), only the last one will + # remain in headers (but they will be correctly stored in + # request.cookie). + if "=?" in value: + dict.__setitem__(headers, name, httputil.decode_TEXT(value)) + else: + dict.__setitem__(headers, name, value) + + # Handle cookies differently because on Konqueror, multiple + # cookies come on different lines with the same key + if name == 'Cookie': + try: + self.cookie.load(value) + except CookieError: + msg = "Illegal cookie name %s" % value.split('=')[0] + raise cherrypy.HTTPError(400, msg) + + if not dict.__contains__(headers, 'Host'): + # All Internet-based HTTP/1.1 servers MUST respond with a 400 + # (Bad Request) status code to any HTTP/1.1 request message + # which lacks a Host header field. + if self.protocol >= (1, 1): + msg = "HTTP/1.1 requires a 'Host' request header." + raise cherrypy.HTTPError(400, msg) + host = dict.get(headers, 'Host') + if not host: + host = self.local.name or self.local.ip + self.base = "%s://%s" % (self.scheme, host) + + def get_resource(self, path): + """Call a dispatcher (which sets self.handler and .config). (Core)""" + # First, see if there is a custom dispatch at this URI. Custom + # dispatchers can only be specified in app.config, not in _cp_config + # (since custom dispatchers may not even have an app.root). 
+ dispatch = self.app.find_config( + path, "request.dispatch", self.dispatch) + + # dispatch() should set self.handler and self.config + dispatch(path) + + def handle_error(self): + """Handle the last unanticipated exception. (Core)""" + try: + self.hooks.run("before_error_response") + if self.error_response: + self.error_response() + self.hooks.run("after_error_response") + cherrypy.serving.response.finalize() + except cherrypy.HTTPRedirect: + inst = sys.exc_info()[1] + inst.set_response() + cherrypy.serving.response.finalize() + + # ------------------------- Properties ------------------------- # + + def _get_body_params(self): + warnings.warn( + "body_params is deprecated in CherryPy 3.2, will be removed in " + "CherryPy 3.3.", + DeprecationWarning + ) + return self.body.params + body_params = property(_get_body_params, + doc=""" + If the request Content-Type is 'application/x-www-form-urlencoded' or + multipart, this will be a dict of the params pulled from the entity + body; that is, it will be the portion of request.params that come + from the message body (sometimes called "POST params", although they + can be sent with various HTTP method verbs). This value is set between + the 'before_request_body' and 'before_handler' hooks (assuming that + process_request_body is True). + + Deprecated in 3.2, will be removed for 3.3 in favor of + :attr:`request.body.params`.""") + + +class ResponseBody(object): + + """The body of the HTTP response (the response entity).""" + + if six.PY3: + unicode_err = ("Page handlers MUST return bytes. Use tools.encode " + "if you wish to return unicode.") + + def __get__(self, obj, objclass=None): + if obj is None: + # When calling on the class instead of an instance... + return self + else: + return obj._body + + def __set__(self, obj, value): + # Convert the given value to an iterable object. 
+ if six.PY3 and isinstance(value, str): + raise ValueError(self.unicode_err) + + if isinstance(value, text_or_bytes): + # strings get wrapped in a list because iterating over a single + # item list is much faster than iterating over every character + # in a long string. + if value: + value = [value] + else: + # [''] doesn't evaluate to False, so replace it with []. + value = [] + elif six.PY3 and isinstance(value, list): + # every item in a list must be bytes... + for i, item in enumerate(value): + if isinstance(item, str): + raise ValueError(self.unicode_err) + # Don't use isinstance here; io.IOBase which has an ABC takes + # 1000 times as long as, say, isinstance(value, str) + elif hasattr(value, 'read'): + value = file_generator(value) + elif value is None: + value = [] + obj._body = value + + +class Response(object): + + """An HTTP Response, including status, headers, and body.""" + + status = "" + """The HTTP Status-Code and Reason-Phrase.""" + + header_list = [] + """ + A list of the HTTP response headers as (name, value) tuples. + In general, you should use response.headers (a dict) instead. This + attribute is generated from response.headers and is not valid until + after the finalize phase.""" + + headers = httputil.HeaderMap() + """ + A dict-like object containing the response headers. Keys are header + names (in Title-Case format); however, you may get and set them in + a case-insensitive manner. That is, headers['Content-Type'] and + headers['content-type'] refer to the same value. Values are header + values (decoded according to :rfc:`2047` if necessary). + + .. seealso:: classes :class:`HeaderMap`, :class:`HeaderElement` + """ + + cookie = SimpleCookie() + """See help(Cookie).""" + + body = ResponseBody() + """The body (entity) of the HTTP response.""" + + time = None + """The value of time.time() when created. 
Use in HTTP dates.""" + + timeout = 300 + """Seconds after which the response will be aborted.""" + + timed_out = False + """ + Flag to indicate the response should be aborted, because it has + exceeded its timeout.""" + + stream = False + """If False, buffer the response body.""" + + def __init__(self): + self.status = None + self.header_list = None + self._body = [] + self.time = time.time() + + self.headers = httputil.HeaderMap() + # Since we know all our keys are titled strings, we can + # bypass HeaderMap.update and get a big speed boost. + dict.update(self.headers, { + "Content-Type": 'text/html', + "Server": "CherryPy/" + cherrypy.__version__, + "Date": httputil.HTTPDate(self.time), + }) + self.cookie = SimpleCookie() + + def collapse_body(self): + """Collapse self.body to a single string; replace it and return it.""" + if isinstance(self.body, text_or_bytes): + return self.body + + newbody = [] + for chunk in self.body: + if six.PY3 and not isinstance(chunk, bytes): + raise TypeError("Chunk %s is not of type 'bytes'." % + repr(chunk)) + newbody.append(chunk) + newbody = ntob('').join(newbody) + + self.body = newbody + return newbody + + def finalize(self): + """Transform headers (and cookies) into self.header_list. (Core)""" + try: + code, reason, _ = httputil.valid_status(self.status) + except ValueError: + raise cherrypy.HTTPError(500, sys.exc_info()[1].args[0]) + + headers = self.headers + + self.status = "%s %s" % (code, reason) + self.output_status = ntob(str(code), 'ascii') + \ + ntob(" ") + headers.encode(reason) + + if self.stream: + # The upshot: wsgiserver will chunk the response if + # you pop Content-Length (or set it explicitly to None). + # Note that lib.static sets C-L to the file's st_size. 
+ if dict.get(headers, 'Content-Length') is None: + dict.pop(headers, 'Content-Length', None) + elif code < 200 or code in (204, 205, 304): + # "All 1xx (informational), 204 (no content), + # and 304 (not modified) responses MUST NOT + # include a message-body." + dict.pop(headers, 'Content-Length', None) + self.body = ntob("") + else: + # Responses which are not streamed should have a Content-Length, + # but allow user code to set Content-Length if desired. + if dict.get(headers, 'Content-Length') is None: + content = self.collapse_body() + dict.__setitem__(headers, 'Content-Length', len(content)) + + # Transform our header dict into a list of tuples. + self.header_list = h = headers.output() + + cookie = self.cookie.output() + if cookie: + for line in cookie.split("\n"): + if line.endswith("\r"): + # Python 2.4 emits cookies joined by LF but 2.5+ by CRLF. + line = line[:-1] + name, value = line.split(": ", 1) + if isinstance(name, six.text_type): + name = name.encode("ISO-8859-1") + if isinstance(value, six.text_type): + value = headers.encode(value) + h.append((name, value)) + + def check_timeout(self): + """If now > self.time + self.timeout, set self.timed_out. + + This purposefully sets a flag, rather than raising an error, + so that a monitor thread can interrupt the Response thread. + """ + if time.time() > self.time + self.timeout: + self.timed_out = True diff --git a/deps/cherrypy/_cpserver.py b/deps/cherrypy/_cpserver.py new file mode 100644 index 00000000..2e6e48de --- /dev/null +++ b/deps/cherrypy/_cpserver.py @@ -0,0 +1,226 @@ +"""Manage HTTP servers with CherryPy.""" + +import six + +import cherrypy +from cherrypy.lib.reprconf import attributes +from cherrypy._cpcompat import text_or_bytes + +# We import * because we want to export check_port +# et al as attributes of this module. +from cherrypy.process.servers import * + + +class Server(ServerAdapter): + + """An adapter for an HTTP server. 
+ + You can set attributes (like socket_host and socket_port) + on *this* object (which is probably cherrypy.server), and call + quickstart. For example:: + + cherrypy.server.socket_port = 80 + cherrypy.quickstart() + """ + + socket_port = 8080 + """The TCP port on which to listen for connections.""" + + _socket_host = '127.0.0.1' + + def _get_socket_host(self): + return self._socket_host + + def _set_socket_host(self, value): + if value == '': + raise ValueError("The empty string ('') is not an allowed value. " + "Use '0.0.0.0' instead to listen on all active " + "interfaces (INADDR_ANY).") + self._socket_host = value + socket_host = property( + _get_socket_host, + _set_socket_host, + doc="""The hostname or IP address on which to listen for connections. + + Host values may be any IPv4 or IPv6 address, or any valid hostname. + The string 'localhost' is a synonym for '127.0.0.1' (or '::1', if + your hosts file prefers IPv6). The string '0.0.0.0' is a special + IPv4 entry meaning "any active interface" (INADDR_ANY), and '::' + is the similar IN6ADDR_ANY for IPv6. The empty string or None are + not allowed.""") + + socket_file = None + """If given, the name of the UNIX socket to use instead of TCP/IP. 
+ + When this option is not None, the `socket_host` and `socket_port` options + are ignored.""" + + socket_queue_size = 5 + """The 'backlog' argument to socket.listen(); specifies the maximum number + of queued connections (default 5).""" + + socket_timeout = 10 + """The timeout in seconds for accepted connections (default 10).""" + + accepted_queue_size = -1 + """The maximum number of requests which will be queued up before + the server refuses to accept it (default -1, meaning no limit).""" + + accepted_queue_timeout = 10 + """The timeout in seconds for attempting to add a request to the + queue when the queue is full (default 10).""" + + shutdown_timeout = 5 + """The time to wait for HTTP worker threads to clean up.""" + + protocol_version = 'HTTP/1.1' + """The version string to write in the Status-Line of all HTTP responses, + for example, "HTTP/1.1" (the default). Depending on the HTTP server used, + this should also limit the supported features used in the response.""" + + thread_pool = 10 + """The number of worker threads to start up in the pool.""" + + thread_pool_max = -1 + """The maximum size of the worker-thread pool. Use -1 to indicate no limit. + """ + + max_request_header_size = 500 * 1024 + """The maximum number of bytes allowable in the request headers. + If exceeded, the HTTP server should return "413 Request Entity Too Large". + """ + + max_request_body_size = 100 * 1024 * 1024 + """The maximum number of bytes allowable in the request body. If exceeded, + the HTTP server should return "413 Request Entity Too Large".""" + + instance = None + """If not None, this should be an HTTP server instance (such as + CPWSGIServer) which cherrypy.server will control. 
Use this when you need + more control over object instantiation than is available in the various + configuration options.""" + + ssl_context = None + """When using PyOpenSSL, an instance of SSL.Context.""" + + ssl_certificate = None + """The filename of the SSL certificate to use.""" + + ssl_certificate_chain = None + """When using PyOpenSSL, the certificate chain to pass to + Context.load_verify_locations.""" + + ssl_private_key = None + """The filename of the private key to use with SSL.""" + + if six.PY3: + ssl_module = 'builtin' + """The name of a registered SSL adaptation module to use with + the builtin WSGI server. Builtin options are: 'builtin' (to + use the SSL library built into recent versions of Python). + You may also register your own classes in the + wsgiserver.ssl_adapters dict.""" + else: + ssl_module = 'pyopenssl' + """The name of a registered SSL adaptation module to use with the + builtin WSGI server. Builtin options are 'builtin' (to use the SSL + library built into recent versions of Python) and 'pyopenssl' (to + use the PyOpenSSL project, which you must install separately). You + may also register your own classes in the wsgiserver.ssl_adapters + dict.""" + + statistics = False + """Turns statistics-gathering on or off for aware HTTP servers.""" + + nodelay = True + """If True (the default since 3.1), sets the TCP_NODELAY socket option.""" + + wsgi_version = (1, 0) + """The WSGI version tuple to use with the builtin WSGI server. + The provided options are (1, 0) [which includes support for PEP 3333, + which declares it covers WSGI version 1.0.1 but still mandates the + wsgi.version (1, 0)] and ('u', 0), an experimental unicode version. 
+ You may create and register your own experimental versions of the WSGI + protocol by adding custom classes to the wsgiserver.wsgi_gateways dict.""" + + def __init__(self): + self.bus = cherrypy.engine + self.httpserver = None + self.interrupt = None + self.running = False + + def httpserver_from_self(self, httpserver=None): + """Return a (httpserver, bind_addr) pair based on self attributes.""" + if httpserver is None: + httpserver = self.instance + if httpserver is None: + from cherrypy import _cpwsgi_server + httpserver = _cpwsgi_server.CPWSGIServer(self) + if isinstance(httpserver, text_or_bytes): + # Is anyone using this? Can I add an arg? + httpserver = attributes(httpserver)(self) + return httpserver, self.bind_addr + + def start(self): + """Start the HTTP server.""" + if not self.httpserver: + self.httpserver, self.bind_addr = self.httpserver_from_self() + ServerAdapter.start(self) + start.priority = 75 + + def _get_bind_addr(self): + if self.socket_file: + return self.socket_file + if self.socket_host is None and self.socket_port is None: + return None + return (self.socket_host, self.socket_port) + + def _set_bind_addr(self, value): + if value is None: + self.socket_file = None + self.socket_host = None + self.socket_port = None + elif isinstance(value, text_or_bytes): + self.socket_file = value + self.socket_host = None + self.socket_port = None + else: + try: + self.socket_host, self.socket_port = value + self.socket_file = None + except ValueError: + raise ValueError("bind_addr must be a (host, port) tuple " + "(for TCP sockets) or a string (for Unix " + "domain sockets), not %r" % value) + bind_addr = property( + _get_bind_addr, + _set_bind_addr, + doc='A (host, port) tuple for TCP sockets or ' + 'a str for Unix domain sockets.') + + def base(self): + """Return the base (scheme://host[:port] or sock file) for this server. 
+ """ + if self.socket_file: + return self.socket_file + + host = self.socket_host + if host in ('0.0.0.0', '::'): + # 0.0.0.0 is INADDR_ANY and :: is IN6ADDR_ANY. + # Look up the host name, which should be the + # safest thing to spit out in a URL. + import socket + host = socket.gethostname() + + port = self.socket_port + + if self.ssl_certificate: + scheme = "https" + if port != 443: + host += ":%s" % port + else: + scheme = "http" + if port != 80: + host += ":%s" % port + + return "%s://%s" % (scheme, host) diff --git a/deps/cherrypy/_cpthreadinglocal.py b/deps/cherrypy/_cpthreadinglocal.py new file mode 100644 index 00000000..238c3224 --- /dev/null +++ b/deps/cherrypy/_cpthreadinglocal.py @@ -0,0 +1,241 @@ +# This is a backport of Python-2.4's threading.local() implementation + +"""Thread-local objects + +(Note that this module provides a Python version of thread + threading.local class. Depending on the version of Python you're + using, there may be a faster one available. You should always import + the local class from threading.) + +Thread-local objects support the management of thread-local data. +If you have data that you want to be local to a thread, simply create +a thread-local object and use its attributes: + + >>> mydata = local() + >>> mydata.number = 42 + >>> mydata.number + 42 + +You can also access the local-object's dictionary: + + >>> mydata.__dict__ + {'number': 42} + >>> mydata.__dict__.setdefault('widgets', []) + [] + >>> mydata.widgets + [] + +What's important about thread-local objects is that their data are +local to a thread. If we access the data in a different thread: + + >>> log = [] + >>> def f(): + ... items = mydata.__dict__.items() + ... items.sort() + ... log.append(items) + ... mydata.number = 11 + ... log.append(mydata.number) + + >>> import threading + >>> thread = threading.Thread(target=f) + >>> thread.start() + >>> thread.join() + >>> log + [[], 11] + +we get different data. 
Furthermore, changes made in the other thread +don't affect data seen in this thread: + + >>> mydata.number + 42 + +Of course, values you get from a local object, including a __dict__ +attribute, are for whatever thread was current at the time the +attribute was read. For that reason, you generally don't want to save +these values across threads, as they apply only to the thread they +came from. + +You can create custom local objects by subclassing the local class: + + >>> class MyLocal(local): + ... number = 2 + ... initialized = False + ... def __init__(self, **kw): + ... if self.initialized: + ... raise SystemError('__init__ called too many times') + ... self.initialized = True + ... self.__dict__.update(kw) + ... def squared(self): + ... return self.number ** 2 + +This can be useful to support default values, methods and +initialization. Note that if you define an __init__ method, it will be +called each time the local object is used in a separate thread. This +is necessary to initialize each thread's dictionary. + +Now if we create a local object: + + >>> mydata = MyLocal(color='red') + +Now we have a default number: + + >>> mydata.number + 2 + +an initial color: + + >>> mydata.color + 'red' + >>> del mydata.color + +And a method that operates on the data: + + >>> mydata.squared() + 4 + +As before, we can access the data in a separate thread: + + >>> log = [] + >>> thread = threading.Thread(target=f) + >>> thread.start() + >>> thread.join() + >>> log + [[('color', 'red'), ('initialized', True)], 11] + +without affecting this thread's data: + + >>> mydata.number + 2 + >>> mydata.color + Traceback (most recent call last): + ... + AttributeError: 'MyLocal' object has no attribute 'color' + +Note that subclasses can define slots, but they are not thread +local. They are shared across threads: + + >>> class MyLocal(local): + ... 
def _patch(self):
    # Swap in the calling thread's private __dict__ for *self* before any
    # attribute access.  Each thread keeps that dict inside its own Thread
    # object's __dict__ under this instance's unique '_local__key'.
    key = object.__getattribute__(self, '_local__key')
    d = currentThread().__dict__.get(key)
    if d is None:
        # First access from this thread: create an empty per-thread dict
        # and install it before running any subclass __init__.
        d = {}
        currentThread().__dict__[key] = d
        object.__setattr__(self, '__dict__', d)

        # we have a new instance dict, so call out __init__ if we have
        # one
        cls = type(self)
        if cls.__init__ is not object.__init__:
            # Replay the constructor args captured by _localbase.__new__
            # so each thread's dict is initialized the same way.
            args, kw = object.__getattribute__(self, '_local__args')
            cls.__init__(self, *args, **kw)
    else:
        # Subsequent access from this thread: just point __dict__ at the
        # already-populated per-thread dict.
        object.__setattr__(self, '__dict__', d)
# Factory trick: this outer __del__ runs exactly once at class-definition
# time.  It captures threading.enumerate and object.__getattribute__ as
# closure locals so the real finalizer still works during interpreter
# shutdown, when module globals may already have been torn down.
def __del__():
    threading_enumerate = enumerate
    __getattribute__ = object.__getattribute__

    def __del__(self):
        # Remove this instance's per-thread dicts from every live Thread
        # object so the thread-local data does not outlive the instance.
        key = __getattribute__(self, '_local__key')

        try:
            threads = list(threading_enumerate())
        except:
            # if enumerate fails, as it seems to do during
            # shutdown, we'll skip cleanup under the assumption
            # that there is nothing to clean up
            return

        for thread in threads:
            try:
                __dict__ = thread.__dict__
            except AttributeError:
                # Thread is dying, rest in peace
                continue

            if key in __dict__:
                try:
                    del __dict__[key]
                except KeyError:
                    pass  # didn't have anything in this thread

    return __del__
# Replace the factory with the finalizer it built.
__del__ = __del__()
+""" + +import sys +import warnings + +import cherrypy +from cherrypy._helper import expose + + +def _getargs(func): + """Return the names of all static arguments to the given function.""" + # Use this instead of importing inspect for less mem overhead. + import types + if sys.version_info >= (3, 0): + if isinstance(func, types.MethodType): + func = func.__func__ + co = func.__code__ + else: + if isinstance(func, types.MethodType): + func = func.im_func + co = func.func_code + return co.co_varnames[:co.co_argcount] + + +_attr_error = ( + "CherryPy Tools cannot be turned on directly. Instead, turn them " + "on via config, or use them as decorators on your page handlers." +) + + +class Tool(object): + + """A registered function for use with CherryPy request-processing hooks. + + help(tool.callable) should give you more information about this Tool. + """ + + namespace = "tools" + + def __init__(self, point, callable, name=None, priority=50): + self._point = point + self.callable = callable + self._name = name + self._priority = priority + self.__doc__ = self.callable.__doc__ + self._setargs() + + def _get_on(self): + raise AttributeError(_attr_error) + + def _set_on(self, value): + raise AttributeError(_attr_error) + on = property(_get_on, _set_on) + + def _setargs(self): + """Copy func parameter names to obj attributes.""" + try: + for arg in _getargs(self.callable): + setattr(self, arg, None) + except (TypeError, AttributeError): + if hasattr(self.callable, "__call__"): + for arg in _getargs(self.callable.__call__): + setattr(self, arg, None) + # IronPython 1.0 raises NotImplementedError because + # inspect.getargspec tries to access Python bytecode + # in co_code attribute. + except NotImplementedError: + pass + # IronPython 1B1 may raise IndexError in some cases, + # but if we trap it here it doesn't prevent CP from working. 
+ except IndexError: + pass + + def _merged_args(self, d=None): + """Return a dict of configuration entries for this Tool.""" + if d: + conf = d.copy() + else: + conf = {} + + tm = cherrypy.serving.request.toolmaps[self.namespace] + if self._name in tm: + conf.update(tm[self._name]) + + if "on" in conf: + del conf["on"] + + return conf + + def __call__(self, *args, **kwargs): + """Compile-time decorator (turn on the tool in config). + + For example:: + + @expose + @tools.proxy() + def whats_my_base(self): + return cherrypy.request.base + """ + if args: + raise TypeError("The %r Tool does not accept positional " + "arguments; you must use keyword arguments." + % self._name) + + def tool_decorator(f): + if not hasattr(f, "_cp_config"): + f._cp_config = {} + subspace = self.namespace + "." + self._name + "." + f._cp_config[subspace + "on"] = True + for k, v in kwargs.items(): + f._cp_config[subspace + k] = v + return f + return tool_decorator + + def _setup(self): + """Hook this tool into cherrypy.request. + + The standard CherryPy request object will automatically call this + method when the tool is "turned on" in config. + """ + conf = self._merged_args() + p = conf.pop("priority", None) + if p is None: + p = getattr(self.callable, "priority", self._priority) + cherrypy.serving.request.hooks.attach(self._point, self.callable, + priority=p, **conf) + + +class HandlerTool(Tool): + + """Tool which is called 'before main', that may skip normal handlers. + + If the tool successfully handles the request (by setting response.body), + if should return True. This will cause CherryPy to skip any 'normal' page + handler. If the tool did not handle the request, it should return False + to tell CherryPy to continue on and call the normal page handler. If the + tool is declared AS a page handler (see the 'handler' method), returning + False will raise NotFound. 
+ """ + + def __init__(self, callable, name=None): + Tool.__init__(self, 'before_handler', callable, name) + + def handler(self, *args, **kwargs): + """Use this tool as a CherryPy page handler. + + For example:: + + class Root: + nav = tools.staticdir.handler(section="/nav", dir="nav", + root=absDir) + """ + @expose + def handle_func(*a, **kw): + handled = self.callable(*args, **self._merged_args(kwargs)) + if not handled: + raise cherrypy.NotFound() + return cherrypy.serving.response.body + return handle_func + + def _wrapper(self, **kwargs): + if self.callable(**kwargs): + cherrypy.serving.request.handler = None + + def _setup(self): + """Hook this tool into cherrypy.request. + + The standard CherryPy request object will automatically call this + method when the tool is "turned on" in config. + """ + conf = self._merged_args() + p = conf.pop("priority", None) + if p is None: + p = getattr(self.callable, "priority", self._priority) + cherrypy.serving.request.hooks.attach(self._point, self._wrapper, + priority=p, **conf) + + +class HandlerWrapperTool(Tool): + + """Tool which wraps request.handler in a provided wrapper function. + + The 'newhandler' arg must be a handler wrapper function that takes a + 'next_handler' argument, plus ``*args`` and ``**kwargs``. Like all + page handler + functions, it must return an iterable for use as cherrypy.response.body. 
+ + For example, to allow your 'inner' page handlers to return dicts + which then get interpolated into a template:: + + def interpolator(next_handler, *args, **kwargs): + filename = cherrypy.request.config.get('template') + cherrypy.response.template = env.get_template(filename) + response_dict = next_handler(*args, **kwargs) + return cherrypy.response.template.render(**response_dict) + cherrypy.tools.jinja = HandlerWrapperTool(interpolator) + """ + + def __init__(self, newhandler, point='before_handler', name=None, + priority=50): + self.newhandler = newhandler + self._point = point + self._name = name + self._priority = priority + + def callable(self, *args, **kwargs): + innerfunc = cherrypy.serving.request.handler + + def wrap(*args, **kwargs): + return self.newhandler(innerfunc, *args, **kwargs) + cherrypy.serving.request.handler = wrap + + +class ErrorTool(Tool): + + """Tool which is used to replace the default request.error_response.""" + + def __init__(self, callable, name=None): + Tool.__init__(self, None, callable, name) + + def _wrapper(self): + self.callable(**self._merged_args()) + + def _setup(self): + """Hook this tool into cherrypy.request. + + The standard CherryPy request object will automatically call this + method when the tool is "turned on" in config. + """ + cherrypy.serving.request.error_response = self._wrapper + + +# Builtin tools # + +from cherrypy.lib import cptools, encoding, auth, static, jsontools +from cherrypy.lib import sessions as _sessions, xmlrpcutil as _xmlrpc +from cherrypy.lib import caching as _caching +from cherrypy.lib import auth_basic, auth_digest + + +class SessionTool(Tool): + + """Session Tool for CherryPy. + + sessions.locking + When 'implicit' (the default), the session will be locked for you, + just before running the page handler. + + When 'early', the session will be locked before reading the request + body. 
def _setup(self):
    """Hook this tool into cherrypy.request.

    The standard CherryPy request object will automatically call this
    method when the tool is "turned on" in config.
    """
    hooks = cherrypy.serving.request.hooks

    conf = self._merged_args()

    p = conf.pop("priority", None)
    if p is None:
        # Fall back to a priority declared on the callable itself,
        # then to this Tool's default.
        p = getattr(self.callable, "priority", self._priority)

    # NOTE(review): 'locking' is popped only AFTER this attach, so it is
    # also forwarded to _sessions.init via **conf — init appears to
    # tolerate unknown kwargs upstream; confirm before reordering.
    hooks.attach(self._point, self.callable, priority=p, **conf)

    locking = conf.pop('locking', 'implicit')
    if locking == 'implicit':
        # Lock the session just before the page handler runs.
        hooks.attach('before_handler', self._lock_session)
    elif locking == 'early':
        # Lock before the request body (but after _sessions.init runs!)
        hooks.attach('before_request_body', self._lock_session,
                     priority=60)
    else:
        # Don't lock
        pass

    # Persist and release the session around the end of the request.
    hooks.attach('before_finalize', _sessions.save)
    hooks.attach('on_end_request', _sessions.close)
+ + You can also supply the following optional config entries:: + + tools.xmlrpc.encoding: 'utf-8' + tools.xmlrpc.allow_none: 0 + + XML-RPC is a rather discontinuous layer over HTTP; dispatching to the + appropriate handler must first be performed according to the URL, and + then a second dispatch step must take place according to the RPC method + specified in the request body. It also allows a superfluous "/RPC2" + prefix in the URL, supplies its own handler args in the body, and + requires a 200 OK "Fault" response instead of 404 when the desired + method is not found. + + Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone. + This Controller acts as the dispatch target for the first half (based + on the URL); it then reads the RPC method from the request body and + does its own second dispatch step based on that method. It also reads + body params, and returns a Fault on error. + + The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2 + in your URL's, you can safely skip turning on the XMLRPCDispatcher. + Otherwise, you need to use declare it in config:: + + request.dispatch: cherrypy.dispatch.XMLRPCDispatcher() + """ + + # Note we're hard-coding this into the 'tools' namespace. We could do + # a huge amount of work to make it relocatable, but the only reason why + # would be if someone actually disabled the default_toolbox. Meh. 
@expose
def default(self, *vpath, **params):
    """Dispatch an XML-RPC request to the method named in its body.

    Reads the RPC method name and params from the request body, walks
    the dotted method name attribute-by-attribute from *self*, and
    responds with the result (or a Fault via the xmlrpc error tool).
    """
    rpcparams, rpcmethod = _xmlrpc.process_body()

    # Resolve dotted method names ('a.b.c') as nested attributes.
    subhandler = self
    for attr in str(rpcmethod).split('.'):
        subhandler = getattr(subhandler, attr, None)

    if subhandler and getattr(subhandler, "exposed", False):
        body = subhandler(*(vpath + rpcparams), **params)

    else:
        # https://github.com/cherrypy/cherrypy/issues/533
        # if a method is not found, an xmlrpclib.Fault should be returned
        # raising an exception here will do that; see
        # cherrypy.lib.xmlrpcutil.on_error
        # NOTE(review): 'attr' is the loop variable, so the message names
        # only the LAST dotted component, not the full rpcmethod.
        raise Exception('method "%s" is not supported' % attr)

    conf = cherrypy.serving.request.toolmaps['tools'].get("xmlrpc", {})
    _xmlrpc.respond(body,
                    conf.get('encoding', 'utf-8'),
                    conf.get('allow_none', 0))
    return cherrypy.serving.response.body
def __exit__(self, exc_type, exc_val, exc_tb):
    """Run tool._setup() for each tool in our toolmap."""
    toolmap = cherrypy.serving.request.toolmaps.get(self.namespace)
    if not toolmap:
        return
    for toolname, settings in toolmap.items():
        # Only tools explicitly switched on in config get hooked up.
        if settings.get("on", False):
            getattr(self, toolname)._setup()
+ + def __init__(self, point, warnmsg=None): + self.point = point + if warnmsg is not None: + self.warnmsg = warnmsg + + def __call__(self, *args, **kwargs): + warnings.warn(self.warnmsg) + + def tool_decorator(f): + return f + return tool_decorator + + def _setup(self): + warnings.warn(self.warnmsg) + + +default_toolbox = _d = Toolbox("tools") +_d.session_auth = SessionAuthTool(cptools.session_auth) +_d.allow = Tool('on_start_resource', cptools.allow) +_d.proxy = Tool('before_request_body', cptools.proxy, priority=30) +_d.response_headers = Tool('on_start_resource', cptools.response_headers) +_d.log_tracebacks = Tool('before_error_response', cptools.log_traceback) +_d.log_headers = Tool('before_error_response', cptools.log_request_headers) +_d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100) +_d.err_redirect = ErrorTool(cptools.redirect) +_d.etags = Tool('before_finalize', cptools.validate_etags, priority=75) +_d.decode = Tool('before_request_body', encoding.decode) +# the order of encoding, gzip, caching is important +_d.encode = Tool('before_handler', encoding.ResponseEncoder, priority=70) +_d.gzip = Tool('before_finalize', encoding.gzip, priority=80) +_d.staticdir = HandlerTool(static.staticdir) +_d.staticfile = HandlerTool(static.staticfile) +_d.sessions = SessionTool() +_d.xmlrpc = ErrorTool(_xmlrpc.on_error) +_d.caching = CachingTool('before_handler', _caching.get, 'caching') +_d.expires = Tool('before_finalize', _caching.expires) +_d.tidy = DeprecatedTool( + 'before_finalize', + "The tidy tool has been removed from the standard distribution of " + "CherryPy. The most recent version can be found at " + "http://tools.cherrypy.org/browser.") +_d.nsgmls = DeprecatedTool( + 'before_finalize', + "The nsgmls tool has been removed from the standard distribution of " + "CherryPy. 
The most recent version can be found at " + "http://tools.cherrypy.org/browser.") +_d.ignore_headers = Tool('before_request_body', cptools.ignore_headers) +_d.referer = Tool('before_request_body', cptools.referer) +_d.basic_auth = Tool('on_start_resource', auth.basic_auth) +_d.digest_auth = Tool('on_start_resource', auth.digest_auth) +_d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60) +_d.flatten = Tool('before_finalize', cptools.flatten) +_d.accept = Tool('on_start_resource', cptools.accept) +_d.redirect = Tool('on_start_resource', cptools.redirect) +_d.autovary = Tool('on_start_resource', cptools.autovary, priority=0) +_d.json_in = Tool('before_request_body', jsontools.json_in, priority=30) +_d.json_out = Tool('before_handler', jsontools.json_out, priority=30) +_d.auth_basic = Tool('before_handler', auth_basic.basic_auth, priority=1) +_d.auth_digest = Tool('before_handler', auth_digest.digest_auth, priority=1) +_d.params = Tool('before_handler', cptools.convert_params) + +del _d, cptools, encoding, auth, static diff --git a/deps/cherrypy/_cptree.py b/deps/cherrypy/_cptree.py new file mode 100644 index 00000000..c40e3b33 --- /dev/null +++ b/deps/cherrypy/_cptree.py @@ -0,0 +1,287 @@ +"""CherryPy Application and Tree objects.""" + +import os + +import six + +import cherrypy +from cherrypy._cpcompat import ntou +from cherrypy import _cpconfig, _cplogging, _cprequest, _cpwsgi, tools +from cherrypy.lib import httputil + + +class Application(object): + + """A CherryPy Application. + + Servers and gateways should not instantiate Request objects directly. + Instead, they should ask an Application object for a request object. + + An instance of this class may also be used as a WSGI callable + (WSGI application object) for itself. + """ + + root = None + """The top-most container of page handlers for this app. 
Handlers should + be arranged in a hierarchy of attributes, matching the expected URI + hierarchy; the default dispatcher then searches this hierarchy for a + matching handler. When using a dispatcher other than the default, + this value may be None.""" + + config = {} + """A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict + of {key: value} pairs.""" + + namespaces = _cpconfig.NamespaceSet() + toolboxes = {'tools': cherrypy.tools} + + log = None + """A LogManager instance. See _cplogging.""" + + wsgiapp = None + """A CPWSGIApp instance. See _cpwsgi.""" + + request_class = _cprequest.Request + response_class = _cprequest.Response + + relative_urls = False + + def __init__(self, root, script_name="", config=None): + self.log = _cplogging.LogManager(id(self), cherrypy.log.logger_root) + self.root = root + self.script_name = script_name + self.wsgiapp = _cpwsgi.CPWSGIApp(self) + + self.namespaces = self.namespaces.copy() + self.namespaces["log"] = lambda k, v: setattr(self.log, k, v) + self.namespaces["wsgi"] = self.wsgiapp.namespace_handler + + self.config = self.__class__.config.copy() + if config: + self.merge(config) + + def __repr__(self): + return "%s.%s(%r, %r)" % (self.__module__, self.__class__.__name__, + self.root, self.script_name) + + script_name_doc = """The URI "mount point" for this app. A mount point + is that portion of the URI which is constant for all URIs that are + serviced by this application; it does not include scheme, host, or proxy + ("virtual host") portions of the URI. + + For example, if script_name is "/my/cool/app", then the URL + "http://www.example.com/my/cool/app/page1" might be handled by a + "page1" method on the root object. + + The value of script_name MUST NOT end in a slash. If the script_name + refers to the root of the URI, it MUST be an empty string (not "/"). + + If script_name is explicitly set to None, then the script_name will be + provided for each call from request.wsgi_environ['SCRIPT_NAME']. 
+ """ + + def _get_script_name(self): + if self._script_name is not None: + return self._script_name + + # A `_script_name` with a value of None signals that the script name + # should be pulled from WSGI environ. + return cherrypy.serving.request.wsgi_environ['SCRIPT_NAME'].rstrip("/") + + def _set_script_name(self, value): + if value: + value = value.rstrip("/") + self._script_name = value + script_name = property(fget=_get_script_name, fset=_set_script_name, + doc=script_name_doc) + + def merge(self, config): + """Merge the given config into self.config.""" + _cpconfig.merge(self.config, config) + + # Handle namespaces specified in config. + self.namespaces(self.config.get("/", {})) + + def find_config(self, path, key, default=None): + """Return the most-specific value for key along path, or default.""" + trail = path or "/" + while trail: + nodeconf = self.config.get(trail, {}) + + if key in nodeconf: + return nodeconf[key] + + lastslash = trail.rfind("/") + if lastslash == -1: + break + elif lastslash == 0 and trail != "/": + trail = "/" + else: + trail = trail[:lastslash] + + return default + + def get_serving(self, local, remote, scheme, sproto): + """Create and return a Request and Response object.""" + req = self.request_class(local, remote, scheme, sproto) + req.app = self + + for name, toolbox in self.toolboxes.items(): + req.namespaces[name] = toolbox + + resp = self.response_class() + cherrypy.serving.load(req, resp) + cherrypy.engine.publish('acquire_thread') + cherrypy.engine.publish('before_request') + + return req, resp + + def release_serving(self): + """Release the current serving (request and response).""" + req = cherrypy.serving.request + + cherrypy.engine.publish('after_request') + + try: + req.close() + except: + cherrypy.log(traceback=True, severity=40) + + cherrypy.serving.clear() + + def __call__(self, environ, start_response): + return self.wsgiapp(environ, start_response) + + +class Tree(object): + + """A registry of CherryPy 
applications, mounted at diverse points. + + An instance of this class may also be used as a WSGI callable + (WSGI application object), in which case it dispatches to all + mounted apps. + """ + + apps = {} + """ + A dict of the form {script name: application}, where "script name" + is a string declaring the URI mount point (no trailing slash), and + "application" is an instance of cherrypy.Application (or an arbitrary + WSGI callable if you happen to be using a WSGI server).""" + + def __init__(self): + self.apps = {} + + def mount(self, root, script_name="", config=None): + """Mount a new app from a root object, script_name, and config. + + root + An instance of a "controller class" (a collection of page + handler methods) which represents the root of the application. + This may also be an Application instance, or None if using + a dispatcher other than the default. + + script_name + A string containing the "mount point" of the application. + This should start with a slash, and be the path portion of the + URL at which to mount the given root. For example, if root.index() + will handle requests to "http://www.example.com:8080/dept/app1/", + then the script_name argument would be "/dept/app1". + + It MUST NOT end in a slash. If the script_name refers to the + root of the URI, it MUST be an empty string (not "/"). + + config + A file or dict containing application config. + """ + if script_name is None: + raise TypeError( + "The 'script_name' argument may not be None. Application " + "objects may, however, possess a script_name of None (in " + "order to inpect the WSGI environ for SCRIPT_NAME upon each " + "request). You cannot mount such Applications on this Tree; " + "you must pass them to a WSGI server interface directly.") + + # Next line both 1) strips trailing slash and 2) maps "/" -> "". 
+ script_name = script_name.rstrip("/") + + if isinstance(root, Application): + app = root + if script_name != "" and script_name != app.script_name: + raise ValueError( + "Cannot specify a different script name and pass an " + "Application instance to cherrypy.mount") + script_name = app.script_name + else: + app = Application(root, script_name) + + # If mounted at "", add favicon.ico + if (script_name == "" and root is not None + and not hasattr(root, "favicon_ico")): + favicon = os.path.join(os.getcwd(), os.path.dirname(__file__), + "favicon.ico") + root.favicon_ico = tools.staticfile.handler(favicon) + + if config: + app.merge(config) + + self.apps[script_name] = app + + return app + + def graft(self, wsgi_callable, script_name=""): + """Mount a wsgi callable at the given script_name.""" + # Next line both 1) strips trailing slash and 2) maps "/" -> "". + script_name = script_name.rstrip("/") + self.apps[script_name] = wsgi_callable + + def script_name(self, path=None): + """The script_name of the app at the given path, or None. + + If path is None, cherrypy.request is used. + """ + if path is None: + try: + request = cherrypy.serving.request + path = httputil.urljoin(request.script_name, + request.path_info) + except AttributeError: + return None + + while True: + if path in self.apps: + return path + + if path == "": + return None + + # Move one node up the tree and try again. + path = path[:path.rfind("/")] + + def __call__(self, environ, start_response): + # If you're calling this, then you're probably setting SCRIPT_NAME + # to '' (some WSGI servers always set SCRIPT_NAME to ''). + # Try to look up the app using the full path. 
+ env1x = environ + if six.PY2 and environ.get(ntou('wsgi.version')) == (ntou('u'), 0): + env1x = _cpwsgi.downgrade_wsgi_ux_to_1x(environ) + path = httputil.urljoin(env1x.get('SCRIPT_NAME', ''), + env1x.get('PATH_INFO', '')) + sn = self.script_name(path or "/") + if sn is None: + start_response('404 Not Found', []) + return [] + + app = self.apps[sn] + + # Correct the SCRIPT_NAME and PATH_INFO environ entries. + environ = environ.copy() + if six.PY2 and environ.get(ntou('wsgi.version')) == (ntou('u'), 0): + # Python 2/WSGI u.0: all strings MUST be of type unicode + enc = environ[ntou('wsgi.url_encoding')] + environ[ntou('SCRIPT_NAME')] = sn.decode(enc) + environ[ntou('PATH_INFO')] = path[len(sn.rstrip("/")):].decode(enc) + else: + environ['SCRIPT_NAME'] = sn + environ['PATH_INFO'] = path[len(sn.rstrip("/")):] + return app(environ, start_response) diff --git a/deps/cherrypy/_cpwsgi.py b/deps/cherrypy/_cpwsgi.py new file mode 100644 index 00000000..29543d92 --- /dev/null +++ b/deps/cherrypy/_cpwsgi.py @@ -0,0 +1,467 @@ +"""WSGI interface (see PEP 333 and 3333). + +Note that WSGI environ keys and values are 'native strings'; that is, +whatever the type of "" is. For Python 2, that's a byte string; for Python 3, +it's a unicode string. But PEP 3333 says: "even if Python's str type is +actually Unicode "under the hood", the content of native strings must +still be translatable to bytes via the Latin-1 encoding!" +""" + +import sys as _sys +import io + +import six + +import cherrypy as _cherrypy +from cherrypy._cpcompat import ntob, ntou +from cherrypy import _cperror +from cherrypy.lib import httputil +from cherrypy.lib import is_closable_iterator + +def downgrade_wsgi_ux_to_1x(environ): + """Return a new environ dict for WSGI 1.x from the given WSGI u.x environ. 
+ """ + env1x = {} + + url_encoding = environ[ntou('wsgi.url_encoding')] + for k, v in list(environ.items()): + if k in [ntou('PATH_INFO'), ntou('SCRIPT_NAME'), ntou('QUERY_STRING')]: + v = v.encode(url_encoding) + elif isinstance(v, six.text_type): + v = v.encode('ISO-8859-1') + env1x[k.encode('ISO-8859-1')] = v + + return env1x + + +class VirtualHost(object): + + """Select a different WSGI application based on the Host header. + + This can be useful when running multiple sites within one CP server. + It allows several domains to point to different applications. For example:: + + root = Root() + RootApp = cherrypy.Application(root) + Domain2App = cherrypy.Application(root) + SecureApp = cherrypy.Application(Secure()) + + vhost = cherrypy._cpwsgi.VirtualHost( + RootApp, + domains={ + 'www.domain2.example': Domain2App, + 'www.domain2.example:443': SecureApp, + }, + ) + + cherrypy.tree.graft(vhost) + """ + default = None + """Required. The default WSGI application.""" + + use_x_forwarded_host = True + """If True (the default), any "X-Forwarded-Host" + request header will be used instead of the "Host" header. This + is commonly added by HTTP servers (such as Apache) when proxying.""" + + domains = {} + """A dict of {host header value: application} pairs. + The incoming "Host" request header is looked up in this dict, + and, if a match is found, the corresponding WSGI application + will be called instead of the default. Note that you often need + separate entries for "example.com" and "www.example.com". + In addition, "Host" headers may contain the port number. 
+ """ + + def __init__(self, default, domains=None, use_x_forwarded_host=True): + self.default = default + self.domains = domains or {} + self.use_x_forwarded_host = use_x_forwarded_host + + def __call__(self, environ, start_response): + domain = environ.get('HTTP_HOST', '') + if self.use_x_forwarded_host: + domain = environ.get("HTTP_X_FORWARDED_HOST", domain) + + nextapp = self.domains.get(domain) + if nextapp is None: + nextapp = self.default + return nextapp(environ, start_response) + + +class InternalRedirector(object): + + """WSGI middleware that handles raised cherrypy.InternalRedirect.""" + + def __init__(self, nextapp, recursive=False): + self.nextapp = nextapp + self.recursive = recursive + + def __call__(self, environ, start_response): + redirections = [] + while True: + environ = environ.copy() + try: + return self.nextapp(environ, start_response) + except _cherrypy.InternalRedirect: + ir = _sys.exc_info()[1] + sn = environ.get('SCRIPT_NAME', '') + path = environ.get('PATH_INFO', '') + qs = environ.get('QUERY_STRING', '') + + # Add the *previous* path_info + qs to redirections. + old_uri = sn + path + if qs: + old_uri += "?" + qs + redirections.append(old_uri) + + if not self.recursive: + # Check to see if the new URI has been redirected to + # already + new_uri = sn + ir.path + if ir.query_string: + new_uri += "?" + ir.query_string + if new_uri in redirections: + ir.request.close() + tmpl = ( + "InternalRedirector visited the same URL twice: %r" + ) + raise RuntimeError(tmpl % new_uri) + + # Munge the environment and try again. 
+ environ['REQUEST_METHOD'] = "GET" + environ['PATH_INFO'] = ir.path + environ['QUERY_STRING'] = ir.query_string + environ['wsgi.input'] = io.BytesIO() + environ['CONTENT_LENGTH'] = "0" + environ['cherrypy.previous_request'] = ir.request + + +class ExceptionTrapper(object): + + """WSGI middleware that traps exceptions.""" + + def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)): + self.nextapp = nextapp + self.throws = throws + + def __call__(self, environ, start_response): + return _TrappedResponse( + self.nextapp, + environ, + start_response, + self.throws + ) + + +class _TrappedResponse(object): + + response = iter([]) + + def __init__(self, nextapp, environ, start_response, throws): + self.nextapp = nextapp + self.environ = environ + self.start_response = start_response + self.throws = throws + self.started_response = False + self.response = self.trap( + self.nextapp, self.environ, self.start_response, + ) + self.iter_response = iter(self.response) + + def __iter__(self): + self.started_response = True + return self + + def __next__(self): + return self.trap(next, self.iter_response) + + # todo: https://pythonhosted.org/six/#six.Iterator + if six.PY2: + next = __next__ + + def close(self): + if hasattr(self.response, 'close'): + self.response.close() + + def trap(self, func, *args, **kwargs): + try: + return func(*args, **kwargs) + except self.throws: + raise + except StopIteration: + raise + except: + tb = _cperror.format_exc() + #print('trapped (started %s):' % self.started_response, tb) + _cherrypy.log(tb, severity=40) + if not _cherrypy.request.show_tracebacks: + tb = "" + s, h, b = _cperror.bare_error(tb) + if six.PY3: + # What fun. 
+ s = s.decode('ISO-8859-1') + h = [ + (k.decode('ISO-8859-1'), v.decode('ISO-8859-1')) + for k, v in h + ] + if self.started_response: + # Empty our iterable (so future calls raise StopIteration) + self.iter_response = iter([]) + else: + self.iter_response = iter(b) + + try: + self.start_response(s, h, _sys.exc_info()) + except: + # "The application must not trap any exceptions raised by + # start_response, if it called start_response with exc_info. + # Instead, it should allow such exceptions to propagate + # back to the server or gateway." + # But we still log and call close() to clean up ourselves. + _cherrypy.log(traceback=True, severity=40) + raise + + if self.started_response: + return ntob("").join(b) + else: + return b + + +# WSGI-to-CP Adapter # + + +class AppResponse(object): + + """WSGI response iterable for CherryPy applications.""" + + def __init__(self, environ, start_response, cpapp): + self.cpapp = cpapp + try: + if six.PY2: + if environ.get(ntou('wsgi.version')) == (ntou('u'), 0): + environ = downgrade_wsgi_ux_to_1x(environ) + self.environ = environ + self.run() + + r = _cherrypy.serving.response + + outstatus = r.output_status + if not isinstance(outstatus, bytes): + raise TypeError("response.output_status is not a byte string.") + + outheaders = [] + for k, v in r.header_list: + if not isinstance(k, bytes): + tmpl = "response.header_list key %r is not a byte string." + raise TypeError(tmpl % k) + if not isinstance(v, bytes): + tmpl = ( + "response.header_list value %r is not a byte string." + ) + raise TypeError(tmpl % v) + outheaders.append((k, v)) + + if six.PY3: + # According to PEP 3333, when using Python 3, the response + # status and headers must be bytes masquerading as unicode; + # that is, they must be of type "str" but are restricted to + # code points in the "latin-1" set. 
+ outstatus = outstatus.decode('ISO-8859-1') + outheaders = [ + (k.decode('ISO-8859-1'), v.decode('ISO-8859-1')) + for k, v in outheaders + ] + + self.iter_response = iter(r.body) + self.write = start_response(outstatus, outheaders) + except: + self.close() + raise + + def __iter__(self): + return self + + def __next__(self): + return next(self.iter_response) + + # todo: https://pythonhosted.org/six/#six.Iterator + if six.PY2: + next = __next__ + + def close(self): + """Close and de-reference the current request and response. (Core)""" + streaming = _cherrypy.serving.response.stream + self.cpapp.release_serving() + + # We avoid the expense of examining the iterator to see if it's + # closable unless we are streaming the response, as that's the + # only situation where we are going to have an iterator which + # may not have been exhausted yet. + if streaming and is_closable_iterator(self.iter_response): + iter_close = self.iter_response.close + try: + iter_close() + except Exception: + _cherrypy.log(traceback=True, severity=40) + + def run(self): + """Create a Request object using environ.""" + env = self.environ.get + + local = httputil.Host( + '', + int(env('SERVER_PORT', 80) or -1), + env('SERVER_NAME', ''), + ) + remote = httputil.Host( + env('REMOTE_ADDR', ''), + int(env('REMOTE_PORT', -1) or -1), + env('REMOTE_HOST', ''), + ) + scheme = env('wsgi.url_scheme') + sproto = env('ACTUAL_SERVER_PROTOCOL', "HTTP/1.1") + request, resp = self.cpapp.get_serving(local, remote, scheme, sproto) + + # LOGON_USER is served by IIS, and is the name of the + # user after having been mapped to a local account. + # Both IIS and Apache set REMOTE_USER, when possible. 
+ request.login = env('LOGON_USER') or env('REMOTE_USER') or None + request.multithread = self.environ['wsgi.multithread'] + request.multiprocess = self.environ['wsgi.multiprocess'] + request.wsgi_environ = self.environ + request.prev = env('cherrypy.previous_request', None) + + meth = self.environ['REQUEST_METHOD'] + + path = httputil.urljoin( + self.environ.get('SCRIPT_NAME', ''), + self.environ.get('PATH_INFO', ''), + ) + qs = self.environ.get('QUERY_STRING', '') + + path, qs = self.recode_path_qs(path, qs) or (path, qs) + + rproto = self.environ.get('SERVER_PROTOCOL') + headers = self.translate_headers(self.environ) + rfile = self.environ['wsgi.input'] + request.run(meth, path, qs, rproto, headers, rfile) + + headerNames = { + 'HTTP_CGI_AUTHORIZATION': 'Authorization', + 'CONTENT_LENGTH': 'Content-Length', + 'CONTENT_TYPE': 'Content-Type', + 'REMOTE_HOST': 'Remote-Host', + 'REMOTE_ADDR': 'Remote-Addr', + } + + def recode_path_qs(self, path, qs): + if not six.PY3: + return + + # This isn't perfect; if the given PATH_INFO is in the + # wrong encoding, it may fail to match the appropriate config + # section URI. But meh. + old_enc = self.environ.get('wsgi.url_encoding', 'ISO-8859-1') + new_enc = self.cpapp.find_config( + self.environ.get('PATH_INFO', ''), + "request.uri_encoding", 'utf-8', + ) + if new_enc.lower() == old_enc.lower(): + return + + # Even though the path and qs are unicode, the WSGI server + # is required by PEP 3333 to coerce them to ISO-8859-1 + # masquerading as unicode. So we have to encode back to + # bytes and then decode again using the "correct" encoding. + try: + return ( + path.encode(old_enc).decode(new_enc), + qs.encode(old_enc).decode(new_enc), + ) + except (UnicodeEncodeError, UnicodeDecodeError): + # Just pass them through without transcoding and hope. 
+ pass + + def translate_headers(self, environ): + """Translate CGI-environ header names to HTTP header names.""" + for cgiName in environ: + # We assume all incoming header keys are uppercase already. + if cgiName in self.headerNames: + yield self.headerNames[cgiName], environ[cgiName] + elif cgiName[:5] == "HTTP_": + # Hackish attempt at recovering original header names. + translatedHeader = cgiName[5:].replace("_", "-") + yield translatedHeader, environ[cgiName] + + +class CPWSGIApp(object): + + """A WSGI application object for a CherryPy Application.""" + + pipeline = [ + ('ExceptionTrapper', ExceptionTrapper), + ('InternalRedirector', InternalRedirector), + ] + """A list of (name, wsgiapp) pairs. Each 'wsgiapp' MUST be a + constructor that takes an initial, positional 'nextapp' argument, + plus optional keyword arguments, and returns a WSGI application + (that takes environ and start_response arguments). The 'name' can + be any you choose, and will correspond to keys in self.config.""" + + head = None + """Rather than nest all apps in the pipeline on each call, it's only + done the first time, and the result is memoized into self.head. Set + this to None again if you change self.pipeline after calling self.""" + + config = {} + """A dict whose keys match names listed in the pipeline. Each + value is a further dict which will be passed to the corresponding + named WSGI callable (from the pipeline) as keyword arguments.""" + + response_class = AppResponse + """The class to instantiate and return as the next app in the WSGI chain. + """ + + def __init__(self, cpapp, pipeline=None): + self.cpapp = cpapp + self.pipeline = self.pipeline[:] + if pipeline: + self.pipeline.extend(pipeline) + self.config = self.config.copy() + + def tail(self, environ, start_response): + """WSGI application callable for the actual CherryPy application. + + You probably shouldn't call this; call self.__call__ instead, + so that any WSGI middleware in self.pipeline can run first. 
+ """ + return self.response_class(environ, start_response, self.cpapp) + + def __call__(self, environ, start_response): + head = self.head + if head is None: + # Create and nest the WSGI apps in our pipeline (in reverse order). + # Then memoize the result in self.head. + head = self.tail + for name, callable in self.pipeline[::-1]: + conf = self.config.get(name, {}) + head = callable(head, **conf) + self.head = head + return head(environ, start_response) + + def namespace_handler(self, k, v): + """Config handler for the 'wsgi' namespace.""" + if k == "pipeline": + # Note this allows multiple 'wsgi.pipeline' config entries + # (but each entry will be processed in a 'random' order). + # It should also allow developers to set default middleware + # in code (passed to self.__init__) that deployers can add to + # (but not remove) via config. + self.pipeline.extend(v) + elif k == "response_class": + self.response_class = v + else: + name, arg = k.split(".", 1) + bucket = self.config.setdefault(name, {}) + bucket[arg] = v diff --git a/deps/cherrypy/_cpwsgi_server.py b/deps/cherrypy/_cpwsgi_server.py new file mode 100644 index 00000000..874e2e9f --- /dev/null +++ b/deps/cherrypy/_cpwsgi_server.py @@ -0,0 +1,70 @@ +"""WSGI server interface (see PEP 333). This adds some CP-specific bits to +the framework-agnostic wsgiserver package. +""" +import sys + +import cherrypy +from cherrypy import wsgiserver + + +class CPWSGIServer(wsgiserver.CherryPyWSGIServer): + + """Wrapper for wsgiserver.CherryPyWSGIServer. + + wsgiserver has been designed to not reference CherryPy in any way, + so that it can be used in other frameworks and applications. Therefore, + we wrap it here, so we can set our own mount points from cherrypy.tree + and apply some attributes from config -> cherrypy.server -> wsgiserver. 
+ """ + + def __init__(self, server_adapter=cherrypy.server): + self.server_adapter = server_adapter + self.max_request_header_size = ( + self.server_adapter.max_request_header_size or 0 + ) + self.max_request_body_size = ( + self.server_adapter.max_request_body_size or 0 + ) + + server_name = (self.server_adapter.socket_host or + self.server_adapter.socket_file or + None) + + self.wsgi_version = self.server_adapter.wsgi_version + s = wsgiserver.CherryPyWSGIServer + s.__init__(self, server_adapter.bind_addr, cherrypy.tree, + self.server_adapter.thread_pool, + server_name, + max=self.server_adapter.thread_pool_max, + request_queue_size=self.server_adapter.socket_queue_size, + timeout=self.server_adapter.socket_timeout, + shutdown_timeout=self.server_adapter.shutdown_timeout, + accepted_queue_size=self.server_adapter.accepted_queue_size, + accepted_queue_timeout=self.server_adapter.accepted_queue_timeout, + ) + self.protocol = self.server_adapter.protocol_version + self.nodelay = self.server_adapter.nodelay + + if sys.version_info >= (3, 0): + ssl_module = self.server_adapter.ssl_module or 'builtin' + else: + ssl_module = self.server_adapter.ssl_module or 'pyopenssl' + if self.server_adapter.ssl_context: + adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module) + self.ssl_adapter = adapter_class( + self.server_adapter.ssl_certificate, + self.server_adapter.ssl_private_key, + self.server_adapter.ssl_certificate_chain) + self.ssl_adapter.context = self.server_adapter.ssl_context + elif self.server_adapter.ssl_certificate: + adapter_class = wsgiserver.get_ssl_adapter_class(ssl_module) + self.ssl_adapter = adapter_class( + self.server_adapter.ssl_certificate, + self.server_adapter.ssl_private_key, + self.server_adapter.ssl_certificate_chain) + + self.stats['Enabled'] = getattr( + self.server_adapter, 'statistics', False) + + def error_log(self, msg="", level=20, traceback=False): + cherrypy.engine.log(msg, level, traceback) diff --git a/deps/cherrypy/_helper.py 
b/deps/cherrypy/_helper.py new file mode 100644 index 00000000..23357bc8 --- /dev/null +++ b/deps/cherrypy/_helper.py @@ -0,0 +1,298 @@ +""" +Helper functions for CP apps +""" + +import six + +from cherrypy._cpcompat import urljoin as _urljoin, urlencode as _urlencode +from cherrypy._cpcompat import text_or_bytes + +import cherrypy + + +def expose(func=None, alias=None): + """ + Expose the function or class, optionally providing an alias or set of aliases. + """ + def expose_(func): + func.exposed = True + if alias is not None: + if isinstance(alias, text_or_bytes): + parents[alias.replace(".", "_")] = func + else: + for a in alias: + parents[a.replace(".", "_")] = func + return func + + import sys + import types + decoratable_types = types.FunctionType, types.MethodType, type, + if six.PY2: + # Old-style classes are type types.ClassType. + decoratable_types += types.ClassType, + if isinstance(func, decoratable_types): + if alias is None: + # @expose + func.exposed = True + return func + else: + # func = expose(func, alias) + parents = sys._getframe(1).f_locals + return expose_(func) + elif func is None: + if alias is None: + # @expose() + parents = sys._getframe(1).f_locals + return expose_ + else: + # @expose(alias="alias") or + # @expose(alias=["alias1", "alias2"]) + parents = sys._getframe(1).f_locals + return expose_ + else: + # @expose("alias") or + # @expose(["alias1", "alias2"]) + parents = sys._getframe(1).f_locals + alias = func + return expose_ + + +def popargs(*args, **kwargs): + """A decorator for _cp_dispatch + (cherrypy.dispatch.Dispatcher.dispatch_method_name). + + Optional keyword argument: handler=(Object or Function) + + Provides a _cp_dispatch function that pops off path segments into + cherrypy.request.params under the names specified. The dispatch + is then forwarded on to the next vpath element. + + Note that any existing (and exposed) member function of the class that + popargs is applied to will override that value of the argument. 
For + instance, if you have a method named "list" on the class decorated with + popargs, then accessing "/list" will call that function instead of popping + it off as the requested parameter. This restriction applies to all + _cp_dispatch functions. The only way around this restriction is to create + a "blank class" whose only function is to provide _cp_dispatch. + + If there are path elements after the arguments, or more arguments + are requested than are available in the vpath, then the 'handler' + keyword argument specifies the next object to handle the parameterized + request. If handler is not specified or is None, then self is used. + If handler is a function rather than an instance, then that function + will be called with the args specified and the return value from that + function used as the next object INSTEAD of adding the parameters to + cherrypy.request.args. + + This decorator may be used in one of two ways: + + As a class decorator: + @cherrypy.popargs('year', 'month', 'day') + class Blog: + def index(self, year=None, month=None, day=None): + #Process the parameters here; any url like + #/, /2009, /2009/12, or /2009/12/31 + #will fill in the appropriate parameters. + + def create(self): + #This link will still be available at /create. Defined functions + #take precedence over arguments. + + Or as a member of a class: + class Blog: + _cp_dispatch = cherrypy.popargs('year', 'month', 'day') + #... + + The handler argument may be used to mix arguments with built in functions. 
+ For instance, the following setup allows different activities at the + day, month, and year level: + + class DayHandler: + def index(self, year, month, day): + #Do something with this day; probably list entries + + def delete(self, year, month, day): + #Delete all entries for this day + + @cherrypy.popargs('day', handler=DayHandler()) + class MonthHandler: + def index(self, year, month): + #Do something with this month; probably list entries + + def delete(self, year, month): + #Delete all entries for this month + + @cherrypy.popargs('month', handler=MonthHandler()) + class YearHandler: + def index(self, year): + #Do something with this year + + #... + + @cherrypy.popargs('year', handler=YearHandler()) + class Root: + def index(self): + #... + + """ + + # Since keyword arg comes after *args, we have to process it ourselves + # for lower versions of python. + + handler = None + handler_call = False + for k, v in kwargs.items(): + if k == 'handler': + handler = v + else: + raise TypeError( + "cherrypy.popargs() got an unexpected keyword argument '{0}'" + .format(k) + ) + + import inspect + + if handler is not None \ + and (hasattr(handler, '__call__') or inspect.isclass(handler)): + handler_call = True + + def decorated(cls_or_self=None, vpath=None): + if inspect.isclass(cls_or_self): + # cherrypy.popargs is a class decorator + cls = cls_or_self + setattr(cls, cherrypy.dispatch.Dispatcher.dispatch_method_name, decorated) + return cls + + # We're in the actual function + self = cls_or_self + parms = {} + for arg in args: + if not vpath: + break + parms[arg] = vpath.pop(0) + + if handler is not None: + if handler_call: + return handler(**parms) + else: + cherrypy.request.params.update(parms) + return handler + + cherrypy.request.params.update(parms) + + # If we are the ultimate handler, then to prevent our _cp_dispatch + # from being called again, we will resolve remaining elements through + # getattr() directly. 
+ if vpath: + return getattr(self, vpath.pop(0), None) + else: + return self + + return decorated + + +def url(path="", qs="", script_name=None, base=None, relative=None): + """Create an absolute URL for the given path. + + If 'path' starts with a slash ('/'), this will return + (base + script_name + path + qs). + If it does not start with a slash, this returns + (base + script_name [+ request.path_info] + path + qs). + + If script_name is None, cherrypy.request will be used + to find a script_name, if available. + + If base is None, cherrypy.request.base will be used (if available). + Note that you can use cherrypy.tools.proxy to change this. + + Finally, note that this function can be used to obtain an absolute URL + for the current request path (minus the querystring) by passing no args. + If you call url(qs=cherrypy.request.query_string), you should get the + original browser URL (assuming no internal redirections). + + If relative is None or not provided, request.app.relative_urls will + be used (if available, else False). If False, the output will be an + absolute URL (including the scheme, host, vhost, and script_name). + If True, the output will instead be a URL that is relative to the + current request path, perhaps including '..' atoms. If relative is + the string 'server', the output will instead be a URL that is + relative to the server root; i.e., it will start with a slash. + """ + if isinstance(qs, (tuple, list, dict)): + qs = _urlencode(qs) + if qs: + qs = '?' + qs + + if cherrypy.request.app: + if not path.startswith("/"): + # Append/remove trailing slash from path_info as needed + # (this is to support mistyped URL's without redirecting; + # if you want to redirect, use tools.trailing_slash). 
+ pi = cherrypy.request.path_info + if cherrypy.request.is_index is True: + if not pi.endswith('/'): + pi = pi + '/' + elif cherrypy.request.is_index is False: + if pi.endswith('/') and pi != '/': + pi = pi[:-1] + + if path == "": + path = pi + else: + path = _urljoin(pi, path) + + if script_name is None: + script_name = cherrypy.request.script_name + if base is None: + base = cherrypy.request.base + + newurl = base + script_name + path + qs + else: + # No request.app (we're being called outside a request). + # We'll have to guess the base from server.* attributes. + # This will produce very different results from the above + # if you're using vhosts or tools.proxy. + if base is None: + base = cherrypy.server.base() + + path = (script_name or "") + path + newurl = base + path + qs + + if './' in newurl: + # Normalize the URL by removing ./ and ../ + atoms = [] + for atom in newurl.split('/'): + if atom == '.': + pass + elif atom == '..': + atoms.pop() + else: + atoms.append(atom) + newurl = '/'.join(atoms) + + # At this point, we should have a fully-qualified absolute URL. + + if relative is None: + relative = getattr(cherrypy.request.app, "relative_urls", False) + + # See http://www.ietf.org/rfc/rfc2396.txt + if relative == 'server': + # "A relative reference beginning with a single slash character is + # termed an absolute-path reference, as defined by ..." + # This is also sometimes called "server-relative". + newurl = '/' + '/'.join(newurl.split('/', 3)[3:]) + elif relative: + # "A relative reference that does not begin with a scheme name + # or a slash character is termed a relative-path reference." 
+ old = url(relative=False).split('/')[:-1] + new = newurl.split('/') + while old and new: + a, b = old[0], new[0] + if a != b: + break + old.pop(0) + new.pop(0) + new = (['..'] * len(old)) + new + newurl = '/'.join(new) + + return newurl diff --git a/deps/cherrypy/daemon.py b/deps/cherrypy/daemon.py new file mode 100644 index 00000000..e5e9a989 --- /dev/null +++ b/deps/cherrypy/daemon.py @@ -0,0 +1,106 @@ +"""The CherryPy daemon.""" + +import sys + +import cherrypy +from cherrypy.process import plugins, servers +from cherrypy import Application + + +def start(configfiles=None, daemonize=False, environment=None, + fastcgi=False, scgi=False, pidfile=None, imports=None, + cgi=False): + """Subscribe all engine plugins and start the engine.""" + sys.path = [''] + sys.path + for i in imports or []: + exec("import %s" % i) + + for c in configfiles or []: + cherrypy.config.update(c) + # If there's only one app mounted, merge config into it. + if len(cherrypy.tree.apps) == 1: + for app in cherrypy.tree.apps.values(): + if isinstance(app, Application): + app.merge(c) + + engine = cherrypy.engine + + if environment is not None: + cherrypy.config.update({'environment': environment}) + + # Only daemonize if asked to. + if daemonize: + # Don't print anything to stdout/sterr. + cherrypy.config.update({'log.screen': False}) + plugins.Daemonizer(engine).subscribe() + + if pidfile: + plugins.PIDFile(engine, pidfile).subscribe() + + if hasattr(engine, "signal_handler"): + engine.signal_handler.subscribe() + if hasattr(engine, "console_control_handler"): + engine.console_control_handler.subscribe() + + if (fastcgi and (scgi or cgi)) or (scgi and cgi): + cherrypy.log.error("You may only specify one of the cgi, fastcgi, and " + "scgi options.", 'ENGINE') + sys.exit(1) + elif fastcgi or scgi or cgi: + # Turn off autoreload when using *cgi. + cherrypy.config.update({'engine.autoreload.on': False}) + # Turn off the default HTTP server (which is subscribed by default). 
+ cherrypy.server.unsubscribe() + + addr = cherrypy.server.bind_addr + cls = ( + servers.FlupFCGIServer if fastcgi else + servers.FlupSCGIServer if scgi else + servers.FlupCGIServer + ) + f = cls(application=cherrypy.tree, bindAddress=addr) + s = servers.ServerAdapter(engine, httpserver=f, bind_addr=addr) + s.subscribe() + + # Always start the engine; this will start all other services + try: + engine.start() + except: + # Assume the error has been logged already via bus.log. + sys.exit(1) + else: + engine.block() + + +def run(): + from optparse import OptionParser + + p = OptionParser() + p.add_option('-c', '--config', action="append", dest='config', + help="specify config file(s)") + p.add_option('-d', action="store_true", dest='daemonize', + help="run the server as a daemon") + p.add_option('-e', '--environment', dest='environment', default=None, + help="apply the given config environment") + p.add_option('-f', action="store_true", dest='fastcgi', + help="start a fastcgi server instead of the default HTTP " + "server") + p.add_option('-s', action="store_true", dest='scgi', + help="start a scgi server instead of the default HTTP server") + p.add_option('-x', action="store_true", dest='cgi', + help="start a cgi server instead of the default HTTP server") + p.add_option('-i', '--import', action="append", dest='imports', + help="specify modules to import") + p.add_option('-p', '--pidfile', dest='pidfile', default=None, + help="store the process id in the given file") + p.add_option('-P', '--Path', action="append", dest='Path', + help="add the given paths to sys.path") + options, args = p.parse_args() + + if options.Path: + for p in options.Path: + sys.path.insert(0, p) + + start(options.config, options.daemonize, + options.environment, options.fastcgi, options.scgi, + options.pidfile, options.imports, options.cgi) diff --git a/deps/cherrypy/process/__init__.py b/deps/cherrypy/process/__init__.py new file mode 100644 index 00000000..97f91ce7 --- /dev/null +++ 
b/deps/cherrypy/process/__init__.py @@ -0,0 +1,14 @@ +"""Site container for an HTTP server. + +A Web Site Process Bus object is used to connect applications, servers, +and frameworks with site-wide services such as daemonization, process +reload, signal handling, drop privileges, PID file management, logging +for all of these, and many more. + +The 'plugins' module defines a few abstract and concrete services for +use with the bus. Some use tool-specific channels; see the documentation +for each class. +""" + +from cherrypy.process.wspbus import bus # noqa +from cherrypy.process import plugins, servers # noqa diff --git a/deps/cherrypy/process/__pycache__/__init__.cpython-34.pyc b/deps/cherrypy/process/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..55235b2d Binary files /dev/null and b/deps/cherrypy/process/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/cherrypy/process/__pycache__/plugins.cpython-34.pyc b/deps/cherrypy/process/__pycache__/plugins.cpython-34.pyc new file mode 100644 index 00000000..63a64f07 Binary files /dev/null and b/deps/cherrypy/process/__pycache__/plugins.cpython-34.pyc differ diff --git a/deps/cherrypy/process/__pycache__/servers.cpython-34.pyc b/deps/cherrypy/process/__pycache__/servers.cpython-34.pyc new file mode 100644 index 00000000..308863e8 Binary files /dev/null and b/deps/cherrypy/process/__pycache__/servers.cpython-34.pyc differ diff --git a/deps/cherrypy/process/__pycache__/win32.cpython-34.pyc b/deps/cherrypy/process/__pycache__/win32.cpython-34.pyc new file mode 100644 index 00000000..caf0e990 Binary files /dev/null and b/deps/cherrypy/process/__pycache__/win32.cpython-34.pyc differ diff --git a/deps/cherrypy/process/__pycache__/wspbus.cpython-34.pyc b/deps/cherrypy/process/__pycache__/wspbus.cpython-34.pyc new file mode 100644 index 00000000..067d8072 Binary files /dev/null and b/deps/cherrypy/process/__pycache__/wspbus.cpython-34.pyc differ diff --git a/deps/cherrypy/process/plugins.py 
b/deps/cherrypy/process/plugins.py new file mode 100644 index 00000000..23c83e91 --- /dev/null +++ b/deps/cherrypy/process/plugins.py @@ -0,0 +1,740 @@ +"""Site services for use with a Web Site Process Bus.""" + +import os +import re +import signal as _signal +import sys +import time +import threading + +from cherrypy._cpcompat import text_or_bytes, get_daemon, get_thread_ident +from cherrypy._cpcompat import ntob, Timer, SetDaemonProperty + +# _module__file__base is used by Autoreload to make +# absolute any filenames retrieved from sys.modules which are not +# already absolute paths. This is to work around Python's quirk +# of importing the startup script and using a relative filename +# for it in sys.modules. +# +# Autoreload examines sys.modules afresh every time it runs. If an application +# changes the current directory by executing os.chdir(), then the next time +# Autoreload runs, it will not be able to find any filenames which are +# not absolute paths, because the current directory is not the same as when the +# module was first imported. Autoreload will then wrongly conclude the file +# has "changed", and initiate the shutdown/re-exec sequence. +# See ticket #917. +# For this workaround to have a decent probability of success, this module +# needs to be imported as early as possible, before the app has much chance +# to change the working directory. +_module__file__base = os.getcwd() + + +class SimplePlugin(object): + + """Plugin base class which auto-subscribes methods for known channels.""" + + bus = None + """A :class:`Bus `, usually cherrypy.engine. + """ + + def __init__(self, bus): + self.bus = bus + + def subscribe(self): + """Register this object as a (multi-channel) listener on the bus.""" + for channel in self.bus.listeners: + # Subscribe self.start, self.exit, etc. if present. 
+ method = getattr(self, channel, None) + if method is not None: + self.bus.subscribe(channel, method) + + def unsubscribe(self): + """Unregister this object as a listener on the bus.""" + for channel in self.bus.listeners: + # Unsubscribe self.start, self.exit, etc. if present. + method = getattr(self, channel, None) + if method is not None: + self.bus.unsubscribe(channel, method) + + +class SignalHandler(object): + + """Register bus channels (and listeners) for system signals. + + You can modify what signals your application listens for, and what it does + when it receives signals, by modifying :attr:`SignalHandler.handlers`, + a dict of {signal name: callback} pairs. The default set is:: + + handlers = {'SIGTERM': self.bus.exit, + 'SIGHUP': self.handle_SIGHUP, + 'SIGUSR1': self.bus.graceful, + } + + The :func:`SignalHandler.handle_SIGHUP`` method calls + :func:`bus.restart()` + if the process is daemonized, but + :func:`bus.exit()` + if the process is attached to a TTY. This is because Unix window + managers tend to send SIGHUP to terminal windows when the user closes them. + + Feel free to add signals which are not available on every platform. + The :class:`SignalHandler` will ignore errors raised from attempting + to register handlers for unknown signals. + """ + + handlers = {} + """A map from signal names (e.g. 'SIGTERM') to handlers (e.g. bus.exit).""" + + signals = {} + """A map from signal numbers to names.""" + + for k, v in vars(_signal).items(): + if k.startswith('SIG') and not k.startswith('SIG_'): + signals[v] = k + del k, v + + def __init__(self, bus): + self.bus = bus + # Set default handlers + self.handlers = {'SIGTERM': self.bus.exit, + 'SIGHUP': self.handle_SIGHUP, + 'SIGUSR1': self.bus.graceful, + } + + if sys.platform[:4] == 'java': + del self.handlers['SIGUSR1'] + self.handlers['SIGUSR2'] = self.bus.graceful + self.bus.log("SIGUSR1 cannot be set on the JVM platform. 
" + "Using SIGUSR2 instead.") + self.handlers['SIGINT'] = self._jython_SIGINT_handler + + self._previous_handlers = {} + # used to determine is the process is a daemon in `self._is_daemonized` + self._original_pid = os.getpid() + + + def _jython_SIGINT_handler(self, signum=None, frame=None): + # See http://bugs.jython.org/issue1313 + self.bus.log('Keyboard Interrupt: shutting down bus') + self.bus.exit() + + def _is_daemonized(self): + """Return boolean indicating if the current process is + running as a daemon. + + The criteria to determine the `daemon` condition is to verify + if the current pid is not the same as the one that got used on + the initial construction of the plugin *and* the stdin is not + connected to a terminal. + + The sole validation of the tty is not enough when the plugin + is executing inside other process like in a CI tool + (Buildbot, Jenkins). + """ + if (self._original_pid != os.getpid() and + not os.isatty(sys.stdin.fileno())): + return True + else: + return False + + + def subscribe(self): + """Subscribe self.handlers to signals.""" + for sig, func in self.handlers.items(): + try: + self.set_handler(sig, func) + except ValueError: + pass + + def unsubscribe(self): + """Unsubscribe self.handlers from signals.""" + for signum, handler in self._previous_handlers.items(): + signame = self.signals[signum] + + if handler is None: + self.bus.log("Restoring %s handler to SIG_DFL." % signame) + handler = _signal.SIG_DFL + else: + self.bus.log("Restoring %s handler %r." % (signame, handler)) + + try: + our_handler = _signal.signal(signum, handler) + if our_handler is None: + self.bus.log("Restored old %s handler %r, but our " + "handler was not registered." % + (signame, handler), level=30) + except ValueError: + self.bus.log("Unable to restore %s handler %r." % + (signame, handler), level=40, traceback=True) + + def set_handler(self, signal, listener=None): + """Subscribe a handler for the given signal (number or name). 
+ + If the optional 'listener' argument is provided, it will be + subscribed as a listener for the given signal's channel. + + If the given signal name or number is not available on the current + platform, ValueError is raised. + """ + if isinstance(signal, text_or_bytes): + signum = getattr(_signal, signal, None) + if signum is None: + raise ValueError("No such signal: %r" % signal) + signame = signal + else: + try: + signame = self.signals[signal] + except KeyError: + raise ValueError("No such signal: %r" % signal) + signum = signal + + prev = _signal.signal(signum, self._handle_signal) + self._previous_handlers[signum] = prev + + if listener is not None: + self.bus.log("Listening for %s." % signame) + self.bus.subscribe(signame, listener) + + def _handle_signal(self, signum=None, frame=None): + """Python signal handler (self.set_handler subscribes it for you).""" + signame = self.signals[signum] + self.bus.log("Caught signal %s." % signame) + self.bus.publish(signame) + + def handle_SIGHUP(self): + """Restart if daemonized, else exit.""" + if self._is_daemonized(): + self.bus.log("SIGHUP caught while daemonized. Restarting.") + self.bus.restart() + else: + # not daemonized (may be foreground or background) + self.bus.log("SIGHUP caught but not daemonized. Exiting.") + self.bus.exit() + + +try: + import pwd + import grp +except ImportError: + pwd, grp = None, None + + +class DropPrivileges(SimplePlugin): + + """Drop privileges. uid/gid arguments not available on Windows. 
+ + Special thanks to `Gavin Baker `_ + """ + + def __init__(self, bus, umask=None, uid=None, gid=None): + SimplePlugin.__init__(self, bus) + self.finalized = False + self.uid = uid + self.gid = gid + self.umask = umask + + def _get_uid(self): + return self._uid + + def _set_uid(self, val): + if val is not None: + if pwd is None: + self.bus.log("pwd module not available; ignoring uid.", + level=30) + val = None + elif isinstance(val, text_or_bytes): + val = pwd.getpwnam(val)[2] + self._uid = val + uid = property(_get_uid, _set_uid, + doc="The uid under which to run. Availability: Unix.") + + def _get_gid(self): + return self._gid + + def _set_gid(self, val): + if val is not None: + if grp is None: + self.bus.log("grp module not available; ignoring gid.", + level=30) + val = None + elif isinstance(val, text_or_bytes): + val = grp.getgrnam(val)[2] + self._gid = val + gid = property(_get_gid, _set_gid, + doc="The gid under which to run. Availability: Unix.") + + def _get_umask(self): + return self._umask + + def _set_umask(self, val): + if val is not None: + try: + os.umask + except AttributeError: + self.bus.log("umask function not available; ignoring umask.", + level=30) + val = None + self._umask = val + umask = property( + _get_umask, + _set_umask, + doc="""The default permission mode for newly created files and + directories. + + Usually expressed in octal format, for example, ``0644``. + Availability: Unix, Windows. 
+ """) + + def start(self): + # uid/gid + def current_ids(): + """Return the current (uid, gid) if available.""" + name, group = None, None + if pwd: + name = pwd.getpwuid(os.getuid())[0] + if grp: + group = grp.getgrgid(os.getgid())[0] + return name, group + + if self.finalized: + if not (self.uid is None and self.gid is None): + self.bus.log('Already running as uid: %r gid: %r' % + current_ids()) + else: + if self.uid is None and self.gid is None: + if pwd or grp: + self.bus.log('uid/gid not set', level=30) + else: + self.bus.log('Started as uid: %r gid: %r' % current_ids()) + if self.gid is not None: + os.setgid(self.gid) + os.setgroups([]) + if self.uid is not None: + os.setuid(self.uid) + self.bus.log('Running as uid: %r gid: %r' % current_ids()) + + # umask + if self.finalized: + if self.umask is not None: + self.bus.log('umask already set to: %03o' % self.umask) + else: + if self.umask is None: + self.bus.log('umask not set', level=30) + else: + old_umask = os.umask(self.umask) + self.bus.log('umask old: %03o, new: %03o' % + (old_umask, self.umask)) + + self.finalized = True + # This is slightly higher than the priority for server.start + # in order to facilitate the most common use: starting on a low + # port (which requires root) and then dropping to another user. + start.priority = 77 + + +class Daemonizer(SimplePlugin): + + """Daemonize the running script. + + Use this with a Web Site Process Bus via:: + + Daemonizer(bus).subscribe() + + When this component finishes, the process is completely decoupled from + the parent environment. Please note that when this component is used, + the return code from the parent process will still be 0 if a startup + error occurs in the forked children. Errors in the initial daemonizing + process still return proper exit codes. Therefore, if you use this + plugin to daemonize, don't use the return code as an accurate indicator + of whether the process fully started. 
In fact, that return code only + indicates if the process succesfully finished the first fork. + """ + + def __init__(self, bus, stdin='/dev/null', stdout='/dev/null', + stderr='/dev/null'): + SimplePlugin.__init__(self, bus) + self.stdin = stdin + self.stdout = stdout + self.stderr = stderr + self.finalized = False + + def start(self): + if self.finalized: + self.bus.log('Already deamonized.') + + # forking has issues with threads: + # http://www.opengroup.org/onlinepubs/000095399/functions/fork.html + # "The general problem with making fork() work in a multi-threaded + # world is what to do with all of the threads..." + # So we check for active threads: + if threading.activeCount() != 1: + self.bus.log('There are %r active threads. ' + 'Daemonizing now may cause strange failures.' % + threading.enumerate(), level=30) + + # See http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16 + # (or http://www.faqs.org/faqs/unix-faq/programmer/faq/ section 1.7) + # and http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012 + + # Finish up with the current stdout/stderr + sys.stdout.flush() + sys.stderr.flush() + + # Do first fork. + try: + pid = os.fork() + if pid == 0: + # This is the child process. Continue. + pass + else: + # This is the first parent. Exit, now that we've forked. + self.bus.log('Forking once.') + os._exit(0) + except OSError: + # Python raises OSError rather than returning negative numbers. 
+ exc = sys.exc_info()[1] + sys.exit("%s: fork #1 failed: (%d) %s\n" + % (sys.argv[0], exc.errno, exc.strerror)) + + os.setsid() + + # Do second fork + try: + pid = os.fork() + if pid > 0: + self.bus.log('Forking twice.') + os._exit(0) # Exit second parent + except OSError: + exc = sys.exc_info()[1] + sys.exit("%s: fork #2 failed: (%d) %s\n" + % (sys.argv[0], exc.errno, exc.strerror)) + + os.chdir("/") + os.umask(0) + + si = open(self.stdin, "r") + so = open(self.stdout, "a+") + se = open(self.stderr, "a+") + + # os.dup2(fd, fd2) will close fd2 if necessary, + # so we don't explicitly close stdin/out/err. + # See http://docs.python.org/lib/os-fd-ops.html + os.dup2(si.fileno(), sys.stdin.fileno()) + os.dup2(so.fileno(), sys.stdout.fileno()) + os.dup2(se.fileno(), sys.stderr.fileno()) + + self.bus.log('Daemonized to PID: %s' % os.getpid()) + self.finalized = True + start.priority = 65 + + +class PIDFile(SimplePlugin): + + """Maintain a PID file via a WSPBus.""" + + def __init__(self, bus, pidfile): + SimplePlugin.__init__(self, bus) + self.pidfile = pidfile + self.finalized = False + + def start(self): + pid = os.getpid() + if self.finalized: + self.bus.log('PID %r already written to %r.' % (pid, self.pidfile)) + else: + open(self.pidfile, "wb").write(ntob("%s\n" % pid, 'utf8')) + self.bus.log('PID %r written to %r.' % (pid, self.pidfile)) + self.finalized = True + start.priority = 70 + + def exit(self): + try: + os.remove(self.pidfile) + self.bus.log('PID file removed: %r.' % self.pidfile) + except (KeyboardInterrupt, SystemExit): + raise + except: + pass + + +class PerpetualTimer(Timer): + + """A responsive subclass of threading.Timer whose run() method repeats. + + Use this timer only when you really need a very interruptible timer; + this checks its 'finished' condition up to 20 times a second, which can + results in pretty high CPU usage + """ + + def __init__(self, *args, **kwargs): + "Override parent constructor to allow 'bus' to be provided." 
+ self.bus = kwargs.pop('bus', None) + super(PerpetualTimer, self).__init__(*args, **kwargs) + + def run(self): + while True: + self.finished.wait(self.interval) + if self.finished.isSet(): + return + try: + self.function(*self.args, **self.kwargs) + except Exception: + if self.bus: + self.bus.log( + "Error in perpetual timer thread function %r." % + self.function, level=40, traceback=True) + # Quit on first error to avoid massive logs. + raise + + +class BackgroundTask(SetDaemonProperty, threading.Thread): + + """A subclass of threading.Thread whose run() method repeats. + + Use this class for most repeating tasks. It uses time.sleep() to wait + for each interval, which isn't very responsive; that is, even if you call + self.cancel(), you'll have to wait until the sleep() call finishes before + the thread stops. To compensate, it defaults to being daemonic, which means + it won't delay stopping the whole process. + """ + + def __init__(self, interval, function, args=[], kwargs={}, bus=None): + threading.Thread.__init__(self) + self.interval = interval + self.function = function + self.args = args + self.kwargs = kwargs + self.running = False + self.bus = bus + + # default to daemonic + self.daemon = True + + def cancel(self): + self.running = False + + def run(self): + self.running = True + while self.running: + time.sleep(self.interval) + if not self.running: + return + try: + self.function(*self.args, **self.kwargs) + except Exception: + if self.bus: + self.bus.log("Error in background task thread function %r." + % self.function, level=40, traceback=True) + # Quit on first error to avoid massive logs. + raise + + +class Monitor(SimplePlugin): + + """WSPBus listener to periodically run a callback in its own thread.""" + + callback = None + """The function to call at intervals.""" + + frequency = 60 + """The time in seconds between callback runs.""" + + thread = None + """A :class:`BackgroundTask` + thread. 
+ """ + + def __init__(self, bus, callback, frequency=60, name=None): + SimplePlugin.__init__(self, bus) + self.callback = callback + self.frequency = frequency + self.thread = None + self.name = name + + def start(self): + """Start our callback in its own background thread.""" + if self.frequency > 0: + threadname = self.name or self.__class__.__name__ + if self.thread is None: + self.thread = BackgroundTask(self.frequency, self.callback, + bus=self.bus) + self.thread.setName(threadname) + self.thread.start() + self.bus.log("Started monitor thread %r." % threadname) + else: + self.bus.log("Monitor thread %r already started." % threadname) + start.priority = 70 + + def stop(self): + """Stop our callback's background task thread.""" + if self.thread is None: + self.bus.log("No thread running for %s." % + self.name or self.__class__.__name__) + else: + if self.thread is not threading.currentThread(): + name = self.thread.getName() + self.thread.cancel() + if not get_daemon(self.thread): + self.bus.log("Joining %r" % name) + self.thread.join() + self.bus.log("Stopped thread %r." % name) + self.thread = None + + def graceful(self): + """Stop the callback's background task thread and restart it.""" + self.stop() + self.start() + + +class Autoreloader(Monitor): + + """Monitor which re-executes the process when files change. + + This :ref:`plugin` restarts the process (via :func:`os.execv`) + if any of the files it monitors change (or is deleted). By default, the + autoreloader monitors all imported modules; you can add to the + set by adding to ``autoreload.files``:: + + cherrypy.engine.autoreload.files.add(myFile) + + If there are imported files you do *not* wish to monitor, you can + adjust the ``match`` attribute, a regular expression. For example, + to stop monitoring cherrypy itself:: + + cherrypy.engine.autoreload.match = r'^(?!cherrypy).+' + + Like all :class:`Monitor` plugins, + the autoreload plugin takes a ``frequency`` argument. 
The default is + 1 second; that is, the autoreloader will examine files once each second. + """ + + files = None + """The set of files to poll for modifications.""" + + frequency = 1 + """The interval in seconds at which to poll for modified files.""" + + match = '.*' + """A regular expression by which to match filenames.""" + + def __init__(self, bus, frequency=1, match='.*'): + self.mtimes = {} + self.files = set() + self.match = match + Monitor.__init__(self, bus, self.run, frequency) + + def start(self): + """Start our own background task thread for self.run.""" + if self.thread is None: + self.mtimes = {} + Monitor.start(self) + start.priority = 70 + + def sysfiles(self): + """Return a Set of sys.modules filenames to monitor.""" + files = set() + for k, m in list(sys.modules.items()): + if re.match(self.match, k): + if ( + hasattr(m, '__loader__') and + hasattr(m.__loader__, 'archive') + ): + f = m.__loader__.archive + else: + f = getattr(m, '__file__', None) + if f is not None and not os.path.isabs(f): + # ensure absolute paths so a os.chdir() in the app + # doesn't break me + f = os.path.normpath( + os.path.join(_module__file__base, f)) + files.add(f) + return files + + def run(self): + """Reload the process if registered files have been modified.""" + for filename in self.sysfiles() | self.files: + if filename: + if filename.endswith('.pyc'): + filename = filename[:-1] + + oldtime = self.mtimes.get(filename, 0) + if oldtime is None: + # Module with no .py file. Skip it. + continue + + try: + mtime = os.stat(filename).st_mtime + except OSError: + # Either a module with no .py file, or it's been deleted. + mtime = None + + if filename not in self.mtimes: + # If a module has no .py file, this will be None. + self.mtimes[filename] = mtime + else: + if mtime is None or mtime > oldtime: + # The file has been deleted or modified. + self.bus.log("Restarting because %s changed." % + filename) + self.thread.cancel() + self.bus.log("Stopped thread %r." 
% + self.thread.getName()) + self.bus.restart() + return + + +class ThreadManager(SimplePlugin): + + """Manager for HTTP request threads. + + If you have control over thread creation and destruction, publish to + the 'acquire_thread' and 'release_thread' channels (for each thread). + This will register/unregister the current thread and publish to + 'start_thread' and 'stop_thread' listeners in the bus as needed. + + If threads are created and destroyed by code you do not control + (e.g., Apache), then, at the beginning of every HTTP request, + publish to 'acquire_thread' only. You should not publish to + 'release_thread' in this case, since you do not know whether + the thread will be re-used or not. The bus will call + 'stop_thread' listeners for you when it stops. + """ + + threads = None + """A map of {thread ident: index number} pairs.""" + + def __init__(self, bus): + self.threads = {} + SimplePlugin.__init__(self, bus) + self.bus.listeners.setdefault('acquire_thread', set()) + self.bus.listeners.setdefault('start_thread', set()) + self.bus.listeners.setdefault('release_thread', set()) + self.bus.listeners.setdefault('stop_thread', set()) + + def acquire_thread(self): + """Run 'start_thread' listeners for the current thread. + + If the current thread has already been seen, any 'start_thread' + listeners will not be run again. + """ + thread_ident = get_thread_ident() + if thread_ident not in self.threads: + # We can't just use get_ident as the thread ID + # because some platforms reuse thread ID's. 
+ i = len(self.threads) + 1 + self.threads[thread_ident] = i + self.bus.publish('start_thread', i) + + def release_thread(self): + """Release the current thread and run 'stop_thread' listeners.""" + thread_ident = get_thread_ident() + i = self.threads.pop(thread_ident, None) + if i is not None: + self.bus.publish('stop_thread', i) + + def stop(self): + """Release all threads and run all 'stop_thread' listeners.""" + for thread_ident, i in self.threads.items(): + self.bus.publish('stop_thread', i) + self.threads.clear() + graceful = stop diff --git a/deps/cherrypy/process/servers.py b/deps/cherrypy/process/servers.py new file mode 100644 index 00000000..83304efd --- /dev/null +++ b/deps/cherrypy/process/servers.py @@ -0,0 +1,470 @@ +""" +Starting in CherryPy 3.1, cherrypy.server is implemented as an +:ref:`Engine Plugin`. It's an instance of +:class:`cherrypy._cpserver.Server`, which is a subclass of +:class:`cherrypy.process.servers.ServerAdapter`. The ``ServerAdapter`` class +is designed to control other servers, as well. + +Multiple servers/ports +====================== + +If you need to start more than one HTTP server (to serve on multiple ports, or +protocols, etc.), you can manually register each one and then start them all +with engine.start:: + + s1 = ServerAdapter(cherrypy.engine, MyWSGIServer(host='0.0.0.0', port=80)) + s2 = ServerAdapter(cherrypy.engine, + another.HTTPServer(host='127.0.0.1', + SSL=True)) + s1.subscribe() + s2.subscribe() + cherrypy.engine.start() + +.. index:: SCGI + +FastCGI/SCGI +============ + +There are also Flup\ **F**\ CGIServer and Flup\ **S**\ CGIServer classes in +:mod:`cherrypy.process.servers`. 
To start an fcgi server, for example, +wrap an instance of it in a ServerAdapter:: + + addr = ('0.0.0.0', 4000) + f = servers.FlupFCGIServer(application=cherrypy.tree, bindAddress=addr) + s = servers.ServerAdapter(cherrypy.engine, httpserver=f, bind_addr=addr) + s.subscribe() + +The :doc:`cherryd` startup script will do the above for +you via its `-f` flag. +Note that you need to download and install `flup `_ +yourself, whether you use ``cherryd`` or not. + +.. _fastcgi: +.. index:: FastCGI + +FastCGI +------- + +A very simple setup lets your cherry run with FastCGI. +You just need the flup library, +plus a running Apache server (with ``mod_fastcgi``) or lighttpd server. + +CherryPy code +^^^^^^^^^^^^^ + +hello.py:: + + #!/usr/bin/python + import cherrypy + + class HelloWorld: + \"""Sample request handler class.\""" + @cherrypy.expose + def index(self): + return "Hello world!" + + cherrypy.tree.mount(HelloWorld()) + # CherryPy autoreload must be disabled for the flup server to work + cherrypy.config.update({'engine.autoreload.on':False}) + +Then run :doc:`/deployguide/cherryd` with the '-f' arg:: + + cherryd -c -d -f -i hello.py + +Apache +^^^^^^ + +At the top level in httpd.conf:: + + FastCgiIpcDir /tmp + FastCgiServer /path/to/cherry.fcgi -idle-timeout 120 -processes 4 + +And inside the relevant VirtualHost section:: + + # FastCGI config + AddHandler fastcgi-script .fcgi + ScriptAliasMatch (.*$) /path/to/cherry.fcgi$1 + +Lighttpd +^^^^^^^^ + +For `Lighttpd `_ you can follow these +instructions. Within ``lighttpd.conf`` make sure ``mod_fastcgi`` is +active within ``server.modules``. 
Then, within your ``$HTTP["host"]`` +directive, configure your fastcgi script like the following:: + + $HTTP["url"] =~ "" { + fastcgi.server = ( + "/" => ( + "script.fcgi" => ( + "bin-path" => "/path/to/your/script.fcgi", + "socket" => "/tmp/script.sock", + "check-local" => "disable", + "disable-time" => 1, + "min-procs" => 1, + "max-procs" => 1, # adjust as needed + ), + ), + ) + } # end of $HTTP["url"] =~ "^/" + +Please see `Lighttpd FastCGI Docs +`_ for +an explanation of the possible configuration options. +""" + +import os +import sys +import time +import warnings + + +class ServerAdapter(object): + + """Adapter for an HTTP server. + + If you need to start more than one HTTP server (to serve on multiple + ports, or protocols, etc.), you can manually register each one and then + start them all with bus.start:: + + s1 = ServerAdapter(bus, MyWSGIServer(host='0.0.0.0', port=80)) + s2 = ServerAdapter(bus, another.HTTPServer(host='127.0.0.1', SSL=True)) + s1.subscribe() + s2.subscribe() + bus.start() + """ + + def __init__(self, bus, httpserver=None, bind_addr=None): + self.bus = bus + self.httpserver = httpserver + self.bind_addr = bind_addr + self.interrupt = None + self.running = False + + def subscribe(self): + self.bus.subscribe('start', self.start) + self.bus.subscribe('stop', self.stop) + + def unsubscribe(self): + self.bus.unsubscribe('start', self.start) + self.bus.unsubscribe('stop', self.stop) + + def start(self): + """Start the HTTP server.""" + if self.bind_addr is None: + on_what = "unknown interface (dynamic?)" + elif isinstance(self.bind_addr, tuple): + on_what = self._get_base() + else: + on_what = "socket file: %s" % self.bind_addr + + if self.running: + self.bus.log("Already serving on %s" % on_what) + return + + self.interrupt = None + if not self.httpserver: + raise ValueError("No HTTP server has been created.") + + if not os.environ.get('LISTEN_PID', None): + # Start the httpserver in a new thread. 
+ if isinstance(self.bind_addr, tuple): + wait_for_free_port(*self.bind_addr) + + import threading + t = threading.Thread(target=self._start_http_thread) + t.setName("HTTPServer " + t.getName()) + t.start() + + self.wait() + self.running = True + self.bus.log("Serving on %s" % on_what) + start.priority = 75 + + def _get_base(self): + if not self.httpserver: + return '' + host, port = self.bind_addr + if getattr(self.httpserver, 'ssl_adapter', None): + scheme = "https" + if port != 443: + host += ":%s" % port + else: + scheme = "http" + if port != 80: + host += ":%s" % port + + return "%s://%s" % (scheme, host) + + def _start_http_thread(self): + """HTTP servers MUST be running in new threads, so that the + main thread persists to receive KeyboardInterrupt's. If an + exception is raised in the httpserver's thread then it's + trapped here, and the bus (and therefore our httpserver) + are shut down. + """ + try: + self.httpserver.start() + except KeyboardInterrupt: + self.bus.log(" hit: shutting down HTTP server") + self.interrupt = sys.exc_info()[1] + self.bus.exit() + except SystemExit: + self.bus.log("SystemExit raised: shutting down HTTP server") + self.interrupt = sys.exc_info()[1] + self.bus.exit() + raise + except: + self.interrupt = sys.exc_info()[1] + self.bus.log("Error in HTTP server: shutting down", + traceback=True, level=40) + self.bus.exit() + raise + + def wait(self): + """Wait until the HTTP server is ready to receive requests.""" + while not getattr(self.httpserver, "ready", False): + if self.interrupt: + raise self.interrupt + time.sleep(.1) + + # Wait for port to be occupied + if not os.environ.get('LISTEN_PID', None): + # Wait for port to be occupied if not running via socket-activation + # (for socket-activation the port will be managed by systemd ) + if isinstance(self.bind_addr, tuple): + host, port = self.bind_addr + wait_for_occupied_port(host, port) + + def stop(self): + """Stop the HTTP server.""" + if self.running: + # stop() MUST block 
until the server is *truly* stopped. + self.httpserver.stop() + # Wait for the socket to be truly freed. + if isinstance(self.bind_addr, tuple): + wait_for_free_port(*self.bind_addr) + self.running = False + self.bus.log("HTTP Server %s shut down" % self.httpserver) + else: + self.bus.log("HTTP Server %s already shut down" % self.httpserver) + stop.priority = 25 + + def restart(self): + """Restart the HTTP server.""" + self.stop() + self.start() + + +class FlupCGIServer(object): + + """Adapter for a flup.server.cgi.WSGIServer.""" + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + self.ready = False + + def start(self): + """Start the CGI server.""" + # We have to instantiate the server class here because its __init__ + # starts a threadpool. If we do it too early, daemonize won't work. + from flup.server.cgi import WSGIServer + + self.cgiserver = WSGIServer(*self.args, **self.kwargs) + self.ready = True + self.cgiserver.run() + + def stop(self): + """Stop the HTTP server.""" + self.ready = False + + +class FlupFCGIServer(object): + + """Adapter for a flup.server.fcgi.WSGIServer.""" + + def __init__(self, *args, **kwargs): + if kwargs.get('bindAddress', None) is None: + import socket + if not hasattr(socket, 'fromfd'): + raise ValueError( + 'Dynamic FCGI server not available on this platform. ' + 'You must use a static or external one by providing a ' + 'legal bindAddress.') + self.args = args + self.kwargs = kwargs + self.ready = False + + def start(self): + """Start the FCGI server.""" + # We have to instantiate the server class here because its __init__ + # starts a threadpool. If we do it too early, daemonize won't work. + from flup.server.fcgi import WSGIServer + self.fcgiserver = WSGIServer(*self.args, **self.kwargs) + # TODO: report this bug upstream to flup. 
+ # If we don't set _oldSIGs on Windows, we get: + # File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py", + # line 108, in run + # self._restoreSignalHandlers() + # File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py", + # line 156, in _restoreSignalHandlers + # for signum,handler in self._oldSIGs: + # AttributeError: 'WSGIServer' object has no attribute '_oldSIGs' + self.fcgiserver._installSignalHandlers = lambda: None + self.fcgiserver._oldSIGs = [] + self.ready = True + self.fcgiserver.run() + + def stop(self): + """Stop the HTTP server.""" + # Forcibly stop the fcgi server main event loop. + self.fcgiserver._keepGoing = False + # Force all worker threads to die off. + self.fcgiserver._threadPool.maxSpare = ( + self.fcgiserver._threadPool._idleCount) + self.ready = False + + +class FlupSCGIServer(object): + + """Adapter for a flup.server.scgi.WSGIServer.""" + + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + self.ready = False + + def start(self): + """Start the SCGI server.""" + # We have to instantiate the server class here because its __init__ + # starts a threadpool. If we do it too early, daemonize won't work. + from flup.server.scgi import WSGIServer + self.scgiserver = WSGIServer(*self.args, **self.kwargs) + # TODO: report this bug upstream to flup. + # If we don't set _oldSIGs on Windows, we get: + # File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py", + # line 108, in run + # self._restoreSignalHandlers() + # File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py", + # line 156, in _restoreSignalHandlers + # for signum,handler in self._oldSIGs: + # AttributeError: 'WSGIServer' object has no attribute '_oldSIGs' + self.scgiserver._installSignalHandlers = lambda: None + self.scgiserver._oldSIGs = [] + self.ready = True + self.scgiserver.run() + + def stop(self): + """Stop the HTTP server.""" + self.ready = False + # Forcibly stop the scgi server main event loop. 
+ self.scgiserver._keepGoing = False + # Force all worker threads to die off. + self.scgiserver._threadPool.maxSpare = 0 + + +def client_host(server_host): + """Return the host on which a client can connect to the given listener.""" + if server_host == '0.0.0.0': + # 0.0.0.0 is INADDR_ANY, which should answer on localhost. + return '127.0.0.1' + if server_host in ('::', '::0', '::0.0.0.0'): + # :: is IN6ADDR_ANY, which should answer on localhost. + # ::0 and ::0.0.0.0 are non-canonical but common + # ways to write IN6ADDR_ANY. + return '::1' + return server_host + + +def check_port(host, port, timeout=1.0): + """Raise an error if the given port is not free on the given host.""" + if not host: + raise ValueError("Host values of '' or None are not allowed.") + host = client_host(host) + port = int(port) + + import socket + + # AF_INET or AF_INET6 socket + # Get the correct address family for our host (allows IPv6 addresses) + try: + info = socket.getaddrinfo(host, port, socket.AF_UNSPEC, + socket.SOCK_STREAM) + except socket.gaierror: + if ':' in host: + info = [( + socket.AF_INET6, socket.SOCK_STREAM, 0, "", (host, port, 0, 0) + )] + else: + info = [(socket.AF_INET, socket.SOCK_STREAM, 0, "", (host, port))] + + for res in info: + af, socktype, proto, canonname, sa = res + s = None + try: + s = socket.socket(af, socktype, proto) + # See http://groups.google.com/group/cherrypy-users/ + # browse_frm/thread/bbfe5eb39c904fe0 + s.settimeout(timeout) + s.connect((host, port)) + s.close() + except socket.error: + if s: + s.close() + else: + raise IOError("Port %s is in use on %s; perhaps the previous " + "httpserver did not shut down properly." 
% + (repr(port), repr(host))) + + +# Feel free to increase these defaults on slow systems: +free_port_timeout = 0.1 +occupied_port_timeout = 1.0 + + +def wait_for_free_port(host, port, timeout=None): + """Wait for the specified port to become free (drop requests).""" + if not host: + raise ValueError("Host values of '' or None are not allowed.") + if timeout is None: + timeout = free_port_timeout + + for trial in range(50): + try: + # we are expecting a free port, so reduce the timeout + check_port(host, port, timeout=timeout) + except IOError: + # Give the old server thread time to free the port. + time.sleep(timeout) + else: + return + + raise IOError("Port %r not free on %r" % (port, host)) + + +def wait_for_occupied_port(host, port, timeout=None): + """Wait for the specified port to become active (receive requests).""" + if not host: + raise ValueError("Host values of '' or None are not allowed.") + if timeout is None: + timeout = occupied_port_timeout + + for trial in range(50): + try: + check_port(host, port, timeout=timeout) + except IOError: + # port is occupied + return + else: + time.sleep(timeout) + + if host == client_host(host): + raise IOError("Port %r not bound on %r" % (port, host)) + + # On systems where a loopback interface is not available and the + # server is bound to all interfaces, it's difficult to determine + # whether the server is in fact occupying the port. In this case, + # just issue a warning and move on. See issue #1100. + msg = "Unable to verify that the server is bound on %r" % port + warnings.warn(msg) diff --git a/deps/cherrypy/process/win32.py b/deps/cherrypy/process/win32.py new file mode 100644 index 00000000..4afd3f14 --- /dev/null +++ b/deps/cherrypy/process/win32.py @@ -0,0 +1,180 @@ +"""Windows service. 
Requires pywin32.""" + +import os +import win32api +import win32con +import win32event +import win32service +import win32serviceutil + +from cherrypy.process import wspbus, plugins + + +class ConsoleCtrlHandler(plugins.SimplePlugin): + + """A WSPBus plugin for handling Win32 console events (like Ctrl-C).""" + + def __init__(self, bus): + self.is_set = False + plugins.SimplePlugin.__init__(self, bus) + + def start(self): + if self.is_set: + self.bus.log('Handler for console events already set.', level=40) + return + + result = win32api.SetConsoleCtrlHandler(self.handle, 1) + if result == 0: + self.bus.log('Could not SetConsoleCtrlHandler (error %r)' % + win32api.GetLastError(), level=40) + else: + self.bus.log('Set handler for console events.', level=40) + self.is_set = True + + def stop(self): + if not self.is_set: + self.bus.log('Handler for console events already off.', level=40) + return + + try: + result = win32api.SetConsoleCtrlHandler(self.handle, 0) + except ValueError: + # "ValueError: The object has not been registered" + result = 1 + + if result == 0: + self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' % + win32api.GetLastError(), level=40) + else: + self.bus.log('Removed handler for console events.', level=40) + self.is_set = False + + def handle(self, event): + """Handle console control events (like Ctrl-C).""" + if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT, + win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT, + win32con.CTRL_CLOSE_EVENT): + self.bus.log('Console event %s: shutting down bus' % event) + + # Remove self immediately so repeated Ctrl-C doesn't re-call it. + try: + self.stop() + except ValueError: + pass + + self.bus.exit() + # 'First to return True stops the calls' + return 1 + return 0 + + +class Win32Bus(wspbus.Bus): + + """A Web Site Process Bus implementation for Win32. + + Instead of time.sleep, this bus blocks using native win32event objects. 
+ """ + + def __init__(self): + self.events = {} + wspbus.Bus.__init__(self) + + def _get_state_event(self, state): + """Return a win32event for the given state (creating it if needed).""" + try: + return self.events[state] + except KeyError: + event = win32event.CreateEvent(None, 0, 0, + "WSPBus %s Event (pid=%r)" % + (state.name, os.getpid())) + self.events[state] = event + return event + + def _get_state(self): + return self._state + + def _set_state(self, value): + self._state = value + event = self._get_state_event(value) + win32event.PulseEvent(event) + state = property(_get_state, _set_state) + + def wait(self, state, interval=0.1, channel=None): + """Wait for the given state(s), KeyboardInterrupt or SystemExit. + + Since this class uses native win32event objects, the interval + argument is ignored. + """ + if isinstance(state, (tuple, list)): + # Don't wait for an event that beat us to the punch ;) + if self.state not in state: + events = tuple([self._get_state_event(s) for s in state]) + win32event.WaitForMultipleObjects( + events, 0, win32event.INFINITE) + else: + # Don't wait for an event that beat us to the punch ;) + if self.state != state: + event = self._get_state_event(state) + win32event.WaitForSingleObject(event, win32event.INFINITE) + + +class _ControlCodes(dict): + + """Control codes used to "signal" a service via ControlService. + + User-defined control codes are in the range 128-255. We generally use + the standard Python value for the Linux signal and add 128. 
Example: + + >>> signal.SIGUSR1 + 10 + control_codes['graceful'] = 128 + 10 + """ + + def key_for(self, obj): + """For the given value, return its corresponding key.""" + for key, val in self.items(): + if val is obj: + return key + raise ValueError("The given object could not be found: %r" % obj) + +control_codes = _ControlCodes({'graceful': 138}) + + +def signal_child(service, command): + if command == 'stop': + win32serviceutil.StopService(service) + elif command == 'restart': + win32serviceutil.RestartService(service) + else: + win32serviceutil.ControlService(service, control_codes[command]) + + +class PyWebService(win32serviceutil.ServiceFramework): + + """Python Web Service.""" + + _svc_name_ = "Python Web Service" + _svc_display_name_ = "Python Web Service" + _svc_deps_ = None # sequence of service names on which this depends + _exe_name_ = "pywebsvc" + _exe_args_ = None # Default to no arguments + + # Only exists on Windows 2000 or later, ignored on windows NT + _svc_description_ = "Python Web Service" + + def SvcDoRun(self): + from cherrypy import process + process.bus.start() + process.bus.block() + + def SvcStop(self): + from cherrypy import process + self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) + process.bus.exit() + + def SvcOther(self, control): + process.bus.publish(control_codes.key_for(control)) + + +if __name__ == '__main__': + win32serviceutil.HandleCommandLine(PyWebService) diff --git a/deps/cherrypy/process/wspbus.py b/deps/cherrypy/process/wspbus.py new file mode 100644 index 00000000..4186a96e --- /dev/null +++ b/deps/cherrypy/process/wspbus.py @@ -0,0 +1,471 @@ +"""An implementation of the Web Site Process Bus. + +This module is completely standalone, depending only on the stdlib. 
+ +Web Site Process Bus +-------------------- + +A Bus object is used to contain and manage site-wide behavior: +daemonization, HTTP server start/stop, process reload, signal handling, +drop privileges, PID file management, logging for all of these, +and many more. + +In addition, a Bus object provides a place for each web framework +to register code that runs in response to site-wide events (like +process start and stop), or which controls or otherwise interacts with +the site-wide components mentioned above. For example, a framework which +uses file-based templates would add known template filenames to an +autoreload component. + +Ideally, a Bus object will be flexible enough to be useful in a variety +of invocation scenarios: + + 1. The deployer starts a site from the command line via a + framework-neutral deployment script; applications from multiple frameworks + are mixed in a single site. Command-line arguments and configuration + files are used to define site-wide components such as the HTTP server, + WSGI component graph, autoreload behavior, signal handling, etc. + 2. The deployer starts a site via some other process, such as Apache; + applications from multiple frameworks are mixed in a single site. + Autoreload and signal handling (from Python at least) are disabled. + 3. The deployer starts a site via a framework-specific mechanism; + for example, when running tests, exploring tutorials, or deploying + single applications from a single framework. The framework controls + which site-wide components are enabled as it sees fit. + +The Bus object in this package uses topic-based publish-subscribe +messaging to accomplish all this. A few topic channels are built in +('start', 'stop', 'exit', 'graceful', 'log', and 'main'). Frameworks and +site containers are free to define their own. If a message is sent to a +channel that has not been defined or has no listeners, there is no effect. + +In general, there should only ever be a single Bus object per process. 
+Frameworks and site containers share a single Bus object by publishing +messages and subscribing listeners. + +The Bus object works as a finite state machine which models the current +state of the process. Bus methods move it from one state to another; +those methods then publish to subscribed listeners on the channel for +the new state.:: + + O + | + V + STOPPING --> STOPPED --> EXITING -> X + A A | + | \___ | + | \ | + | V V + STARTED <-- STARTING + +""" + +import atexit +import os +import sys +import threading +import time +import traceback as _traceback +import warnings +import operator + +# Here I save the value of os.getcwd(), which, if I am imported early enough, +# will be the directory from which the startup script was run. This is needed +# by _do_execv(), to change back to the original directory before execv()ing a +# new process. This is a defense against the application having changed the +# current working directory (which could make sys.executable "not found" if +# sys.executable is a relative-path, and/or cause other problems). +_startup_cwd = os.getcwd() + + +class ChannelFailures(Exception): + + """Exception raised when errors occur in a listener during Bus.publish(). + """ + delimiter = '\n' + + def __init__(self, *args, **kwargs): + # Don't use 'super' here; Exceptions are old-style in Py2.4 + # See https://github.com/cherrypy/cherrypy/issues/959 + Exception.__init__(self, *args, **kwargs) + self._exceptions = list() + + def handle_exception(self): + """Append the current exception to self.""" + self._exceptions.append(sys.exc_info()[1]) + + def get_instances(self): + """Return a list of seen exception instances.""" + return self._exceptions[:] + + def __str__(self): + exception_strings = map(repr, self.get_instances()) + return self.delimiter.join(exception_strings) + + __repr__ = __str__ + + def __bool__(self): + return bool(self._exceptions) + __nonzero__ = __bool__ + +# Use a flag to indicate the state of the bus. 
+ + +class _StateEnum(object): + + class State(object): + name = None + + def __repr__(self): + return "states.%s" % self.name + + def __setattr__(self, key, value): + if isinstance(value, self.State): + value.name = key + object.__setattr__(self, key, value) +states = _StateEnum() +states.STOPPED = states.State() +states.STARTING = states.State() +states.STARTED = states.State() +states.STOPPING = states.State() +states.EXITING = states.State() + + +try: + import fcntl +except ImportError: + max_files = 0 +else: + try: + max_files = os.sysconf('SC_OPEN_MAX') + except AttributeError: + max_files = 1024 + + +class Bus(object): + + """Process state-machine and messenger for HTTP site deployment. + + All listeners for a given channel are guaranteed to be called even + if others at the same channel fail. Each failure is logged, but + execution proceeds on to the next listener. The only way to stop all + processing from inside a listener is to raise SystemExit and stop the + whole server. + """ + + states = states + state = states.STOPPED + execv = False + max_cloexec_files = max_files + + def __init__(self): + self.execv = False + self.state = states.STOPPED + channels = 'start', 'stop', 'exit', 'graceful', 'log', 'main' + self.listeners = dict( + (channel, set()) + for channel in channels + ) + self._priorities = {} + + def subscribe(self, channel, callback, priority=None): + """Add the given callback at the given channel (if not present).""" + ch_listeners = self.listeners.setdefault(channel, set()) + ch_listeners.add(callback) + + if priority is None: + priority = getattr(callback, 'priority', 50) + self._priorities[(channel, callback)] = priority + + def unsubscribe(self, channel, callback): + """Discard the given callback (if present).""" + listeners = self.listeners.get(channel) + if listeners and callback in listeners: + listeners.discard(callback) + del self._priorities[(channel, callback)] + + def publish(self, channel, *args, **kwargs): + """Return output of 
all subscribers for the given channel.""" + if channel not in self.listeners: + return [] + + exc = ChannelFailures() + output = [] + + raw_items = ( + (self._priorities[(channel, listener)], listener) + for listener in self.listeners[channel] + ) + items = sorted(raw_items, key=operator.itemgetter(0)) + for priority, listener in items: + try: + output.append(listener(*args, **kwargs)) + except KeyboardInterrupt: + raise + except SystemExit: + e = sys.exc_info()[1] + # If we have previous errors ensure the exit code is non-zero + if exc and e.code == 0: + e.code = 1 + raise + except: + exc.handle_exception() + if channel == 'log': + # Assume any further messages to 'log' will fail. + pass + else: + self.log("Error in %r listener %r" % (channel, listener), + level=40, traceback=True) + if exc: + raise exc + return output + + def _clean_exit(self): + """An atexit handler which asserts the Bus is not running.""" + if self.state != states.EXITING: + warnings.warn( + "The main thread is exiting, but the Bus is in the %r state; " + "shutting it down automatically now. You must either call " + "bus.block() after start(), or call bus.exit() before the " + "main thread exits." % self.state, RuntimeWarning) + self.exit() + + def start(self): + """Start all services.""" + atexit.register(self._clean_exit) + + self.state = states.STARTING + self.log('Bus STARTING') + try: + self.publish('start') + self.state = states.STARTED + self.log('Bus STARTED') + except (KeyboardInterrupt, SystemExit): + raise + except: + self.log("Shutting down due to error in start listener:", + level=40, traceback=True) + e_info = sys.exc_info()[1] + try: + self.exit() + except: + # Any stop/exit errors will be logged inside publish(). 
+ pass + # Re-raise the original error + raise e_info + + def exit(self): + """Stop all services and prepare to exit the process.""" + exitstate = self.state + try: + self.stop() + + self.state = states.EXITING + self.log('Bus EXITING') + self.publish('exit') + # This isn't strictly necessary, but it's better than seeing + # "Waiting for child threads to terminate..." and then nothing. + self.log('Bus EXITED') + except: + # This method is often called asynchronously (whether thread, + # signal handler, console handler, or atexit handler), so we + # can't just let exceptions propagate out unhandled. + # Assume it's been logged and just die. + os._exit(70) # EX_SOFTWARE + + if exitstate == states.STARTING: + # exit() was called before start() finished, possibly due to + # Ctrl-C because a start listener got stuck. In this case, + # we could get stuck in a loop where Ctrl-C never exits the + # process, so we just call os.exit here. + os._exit(70) # EX_SOFTWARE + + def restart(self): + """Restart the process (may close connections). + + This method does not restart the process from the calling thread; + instead, it stops the bus and asks the main thread to call execv. + """ + self.execv = True + self.exit() + + def graceful(self): + """Advise all services to reload.""" + self.log('Bus graceful') + self.publish('graceful') + + def block(self, interval=0.1): + """Wait for the EXITING state, KeyboardInterrupt or SystemExit. + + This function is intended to be called only by the main thread. + After waiting for the EXITING state, it also waits for all threads + to terminate, and then calls os.execv if self.execv is True. This + design allows another thread to call bus.restart, yet have the main + thread perform the actual execv call (required on some platforms). + """ + try: + self.wait(states.EXITING, interval=interval, channel='main') + except (KeyboardInterrupt, IOError): + # The time.sleep call might raise + # "IOError: [Errno 4] Interrupted function call" on KBInt. 
+ self.log('Keyboard Interrupt: shutting down bus') + self.exit() + except SystemExit: + self.log('SystemExit raised: shutting down bus') + self.exit() + raise + + # Waiting for ALL child threads to finish is necessary on OS X. + # See https://github.com/cherrypy/cherrypy/issues/581. + # It's also good to let them all shut down before allowing + # the main thread to call atexit handlers. + # See https://github.com/cherrypy/cherrypy/issues/751. + self.log("Waiting for child threads to terminate...") + for t in threading.enumerate(): + # Validate the we're not trying to join the MainThread + # that will cause a deadlock and the case exist when + # implemented as a windows service and in any other case + # that another thread executes cherrypy.engine.exit() + if ( + t != threading.currentThread() and + t.isAlive() and + not isinstance(t, threading._MainThread) + ): + # Note that any dummy (external) threads are always daemonic. + if hasattr(threading.Thread, "daemon"): + # Python 2.6+ + d = t.daemon + else: + d = t.isDaemon() + if not d: + self.log("Waiting for thread %s." % t.getName()) + t.join() + + if self.execv: + self._do_execv() + + def wait(self, state, interval=0.1, channel=None): + """Poll for the given state(s) at intervals; publish to channel.""" + if isinstance(state, (tuple, list)): + states = state + else: + states = [state] + + def _wait(): + while self.state not in states: + time.sleep(interval) + self.publish(channel) + + # From http://psyco.sourceforge.net/psycoguide/bugs.html: + # "The compiled machine code does not include the regular polling + # done by Python, meaning that a KeyboardInterrupt will not be + # detected before execution comes back to the regular Python + # interpreter. Your program cannot be interrupted if caught + # into an infinite Psyco-compiled loop." + try: + sys.modules['psyco'].cannotcompile(_wait) + except (KeyError, AttributeError): + pass + + _wait() + + def _do_execv(self): + """Re-execute the current process. 
+ + This must be called from the main thread, because certain platforms + (OS X) don't allow execv to be called in a child thread very well. + """ + args = sys.argv[:] + self.log('Re-spawning %s' % ' '.join(args)) + + self._extend_pythonpath(os.environ) + + if sys.platform[:4] == 'java': + from _systemrestart import SystemRestart + raise SystemRestart + else: + args.insert(0, sys.executable) + if sys.platform == 'win32': + args = ['"%s"' % arg for arg in args] + + os.chdir(_startup_cwd) + if self.max_cloexec_files: + self._set_cloexec() + os.execv(sys.executable, args) + + @staticmethod + def _extend_pythonpath(env): + """ + If sys.path[0] is an empty string, the interpreter was likely + invoked with -m and the effective path is about to change on + re-exec. Add the current directory to $PYTHONPATH to ensure + that the new process sees the same path. + + This issue cannot be addressed in the general case because + Python cannot reliably reconstruct the + original command line (http://bugs.python.org/issue14208). + + (This idea filched from tornado.autoreload) + """ + path_prefix = '.' + os.pathsep + existing_path = env.get('PYTHONPATH', '') + needs_patch = ( + sys.path[0] == '' and + not existing_path.startswith(path_prefix) + ) + + if needs_patch: + env["PYTHONPATH"] = path_prefix + existing_path + + def _set_cloexec(self): + """Set the CLOEXEC flag on all open files (except stdin/out/err). + + If self.max_cloexec_files is an integer (the default), then on + platforms which support it, it represents the max open files setting + for the operating system. This function will be called just before + the process is restarted via os.execv() to prevent open files + from persisting into the new process. + + Set self.max_cloexec_files to 0 to disable this behavior. 
+ """ + for fd in range(3, self.max_cloexec_files): # skip stdin/out/err + try: + flags = fcntl.fcntl(fd, fcntl.F_GETFD) + except IOError: + continue + fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC) + + def stop(self): + """Stop all services.""" + self.state = states.STOPPING + self.log('Bus STOPPING') + self.publish('stop') + self.state = states.STOPPED + self.log('Bus STOPPED') + + def start_with_callback(self, func, args=None, kwargs=None): + """Start 'func' in a new thread T, then start self (and return T).""" + if args is None: + args = () + if kwargs is None: + kwargs = {} + args = (func,) + args + + def _callback(func, *a, **kw): + self.wait(states.STARTED) + func(*a, **kw) + t = threading.Thread(target=_callback, args=args, kwargs=kwargs) + t.setName('Bus Callback ' + t.getName()) + t.start() + + self.start() + + return t + + def log(self, msg="", level=20, traceback=False): + """Log the given message. Append the last traceback if requested.""" + if traceback: + msg += "\n" + "".join(_traceback.format_exception(*sys.exc_info())) + self.publish('log', msg, level) + +bus = Bus() diff --git a/deps/cherrypy/scaffold/__init__.py b/deps/cherrypy/scaffold/__init__.py new file mode 100644 index 00000000..9bb4502a --- /dev/null +++ b/deps/cherrypy/scaffold/__init__.py @@ -0,0 +1,59 @@ +""", a CherryPy application. + +Use this as a base for creating new CherryPy applications. When you want +to make a new app, copy and paste this folder to some other location +(maybe site-packages) and rename it to the name of your project, +then tweak as desired. + +Even before any tweaking, this should serve a few demonstration pages. 
+Change to this directory and run: + + ../cherryd -c site.conf + +""" + +import cherrypy +from cherrypy import tools, url + +import os +local_dir = os.path.join(os.getcwd(), os.path.dirname(__file__)) + + +@cherrypy.config(**{'tools.log_tracebacks.on': True}) +class Root: + + @cherrypy.expose + def index(self): + return """ +Try some other path, +or a default path.
+Or, just look at the pretty picture:
+ +""" % (url("other"), url("else"), + url("files/made_with_cherrypy_small.png")) + + @cherrypy.expose + def default(self, *args, **kwargs): + return "args: %s kwargs: %s" % (args, kwargs) + + @cherrypy.expose + def other(self, a=2, b='bananas', c=None): + cherrypy.response.headers['Content-Type'] = 'text/plain' + if c is None: + return "Have %d %s." % (int(a), b) + else: + return "Have %d %s, %s." % (int(a), b, c) + + files = tools.staticdir.handler( + section="/files", + dir=os.path.join(local_dir, "static"), + # Ignore .php files, etc. + match=r'\.(css|gif|html?|ico|jpe?g|js|png|swf|xml)$', + ) + + +root = Root() + +# Uncomment the following to use your own favicon instead of CP's default. +#favicon_path = os.path.join(local_dir, "favicon.ico") +#root.favicon_ico = tools.staticfile.handler(filename=favicon_path) diff --git a/deps/cherrypy/scaffold/__pycache__/__init__.cpython-34.pyc b/deps/cherrypy/scaffold/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..cc2c8d5e Binary files /dev/null and b/deps/cherrypy/scaffold/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__init__.py b/deps/cherrypy/test/__init__.py new file mode 100644 index 00000000..0927c170 --- /dev/null +++ b/deps/cherrypy/test/__init__.py @@ -0,0 +1,30 @@ +"""Regression test suite for CherryPy. + +Run 'nosetests -s test/' to exercise all tests. + +The '-s' flag instructs nose to output stdout messages, wihch is crucial to +the 'interactive' mode of webtest.py. If you run these tests without the '-s' +flag, don't be surprised if the test seems to hang: it's waiting for your +interactive input. +""" + +import os +import sys + + +def newexit(): + os._exit(1) + + +def setup(): + # We want to monkey patch sys.exit so that we can get some + # information about where exit is being called. 
+ newexit._old = sys.exit + sys.exit = newexit + + +def teardown(): + try: + sys.exit = sys.exit._old + except AttributeError: + sys.exit = sys._exit diff --git a/deps/cherrypy/test/__pycache__/__init__.cpython-34.pyc b/deps/cherrypy/test/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..827d3940 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/_test_decorators.cpython-34.pyc b/deps/cherrypy/test/__pycache__/_test_decorators.cpython-34.pyc new file mode 100644 index 00000000..fd0bc64d Binary files /dev/null and b/deps/cherrypy/test/__pycache__/_test_decorators.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/_test_states_demo.cpython-34.pyc b/deps/cherrypy/test/__pycache__/_test_states_demo.cpython-34.pyc new file mode 100644 index 00000000..339f56d5 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/_test_states_demo.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/benchmark.cpython-34.pyc b/deps/cherrypy/test/__pycache__/benchmark.cpython-34.pyc new file mode 100644 index 00000000..43866735 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/benchmark.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/checkerdemo.cpython-34.pyc b/deps/cherrypy/test/__pycache__/checkerdemo.cpython-34.pyc new file mode 100644 index 00000000..86220e7b Binary files /dev/null and b/deps/cherrypy/test/__pycache__/checkerdemo.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/helper.cpython-34.pyc b/deps/cherrypy/test/__pycache__/helper.cpython-34.pyc new file mode 100644 index 00000000..09f474f1 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/helper.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/logtest.cpython-34.pyc b/deps/cherrypy/test/__pycache__/logtest.cpython-34.pyc new file mode 100644 index 00000000..699f87ac Binary files /dev/null and 
b/deps/cherrypy/test/__pycache__/logtest.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/modfastcgi.cpython-34.pyc b/deps/cherrypy/test/__pycache__/modfastcgi.cpython-34.pyc new file mode 100644 index 00000000..149a3010 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/modfastcgi.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/modfcgid.cpython-34.pyc b/deps/cherrypy/test/__pycache__/modfcgid.cpython-34.pyc new file mode 100644 index 00000000..49dc5813 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/modfcgid.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/modpy.cpython-34.pyc b/deps/cherrypy/test/__pycache__/modpy.cpython-34.pyc new file mode 100644 index 00000000..8b571fe3 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/modpy.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/modwsgi.cpython-34.pyc b/deps/cherrypy/test/__pycache__/modwsgi.cpython-34.pyc new file mode 100644 index 00000000..635eb299 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/modwsgi.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/sessiondemo.cpython-34.pyc b/deps/cherrypy/test/__pycache__/sessiondemo.cpython-34.pyc new file mode 100644 index 00000000..131b12d4 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/sessiondemo.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_auth_basic.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_auth_basic.cpython-34.pyc new file mode 100644 index 00000000..e3971031 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_auth_basic.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_auth_digest.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_auth_digest.cpython-34.pyc new file mode 100644 index 00000000..dfdd6902 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_auth_digest.cpython-34.pyc differ diff --git 
a/deps/cherrypy/test/__pycache__/test_bus.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_bus.cpython-34.pyc new file mode 100644 index 00000000..0799644a Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_bus.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_caching.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_caching.cpython-34.pyc new file mode 100644 index 00000000..3e7537b3 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_caching.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_compat.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_compat.cpython-34.pyc new file mode 100644 index 00000000..f6a9e0e7 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_compat.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_config.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_config.cpython-34.pyc new file mode 100644 index 00000000..6acb532c Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_config.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_config_server.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_config_server.cpython-34.pyc new file mode 100644 index 00000000..2aa08599 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_config_server.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_conn.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_conn.cpython-34.pyc new file mode 100644 index 00000000..f630f155 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_conn.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_core.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_core.cpython-34.pyc new file mode 100644 index 00000000..706469f7 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_core.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_dynamicobjectmapping.cpython-34.pyc 
b/deps/cherrypy/test/__pycache__/test_dynamicobjectmapping.cpython-34.pyc new file mode 100644 index 00000000..4b2fbeaf Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_dynamicobjectmapping.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_encoding.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_encoding.cpython-34.pyc new file mode 100644 index 00000000..2e482278 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_encoding.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_etags.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_etags.cpython-34.pyc new file mode 100644 index 00000000..8e7f5a4c Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_etags.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_http.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_http.cpython-34.pyc new file mode 100644 index 00000000..7e0420fe Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_http.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_httpauth.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_httpauth.cpython-34.pyc new file mode 100644 index 00000000..5e0e6d5b Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_httpauth.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_httplib.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_httplib.cpython-34.pyc new file mode 100644 index 00000000..07e67122 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_httplib.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_iterator.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_iterator.cpython-34.pyc new file mode 100644 index 00000000..f3c1ed27 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_iterator.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_json.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_json.cpython-34.pyc new 
file mode 100644 index 00000000..4d3dd794 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_json.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_logging.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_logging.cpython-34.pyc new file mode 100644 index 00000000..374393ce Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_logging.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_mime.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_mime.cpython-34.pyc new file mode 100644 index 00000000..bd38ab7e Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_mime.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_misc_tools.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_misc_tools.cpython-34.pyc new file mode 100644 index 00000000..71c9acee Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_misc_tools.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_objectmapping.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_objectmapping.cpython-34.pyc new file mode 100644 index 00000000..e77392b8 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_objectmapping.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_params.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_params.cpython-34.pyc new file mode 100644 index 00000000..11c97ac2 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_params.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_proxy.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_proxy.cpython-34.pyc new file mode 100644 index 00000000..cb03ae4a Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_proxy.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_refleaks.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_refleaks.cpython-34.pyc new file mode 100644 index 00000000..0e42bd0e Binary files /dev/null and 
b/deps/cherrypy/test/__pycache__/test_refleaks.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_request_obj.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_request_obj.cpython-34.pyc new file mode 100644 index 00000000..c65675a4 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_request_obj.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_routes.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_routes.cpython-34.pyc new file mode 100644 index 00000000..56dfd3f9 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_routes.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_session.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_session.cpython-34.pyc new file mode 100644 index 00000000..6c08f7f7 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_session.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_sessionauthenticate.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_sessionauthenticate.cpython-34.pyc new file mode 100644 index 00000000..e711020e Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_sessionauthenticate.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_states.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_states.cpython-34.pyc new file mode 100644 index 00000000..4a36f1d3 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_states.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_static.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_static.cpython-34.pyc new file mode 100644 index 00000000..88f76daa Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_static.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_tools.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_tools.cpython-34.pyc new file mode 100644 index 00000000..d5d142f2 Binary files /dev/null and 
b/deps/cherrypy/test/__pycache__/test_tools.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_tutorials.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_tutorials.cpython-34.pyc new file mode 100644 index 00000000..55921c9d Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_tutorials.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_virtualhost.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_virtualhost.cpython-34.pyc new file mode 100644 index 00000000..3c48e498 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_virtualhost.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_wsgi_ns.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_wsgi_ns.cpython-34.pyc new file mode 100644 index 00000000..6911ecf8 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_wsgi_ns.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_wsgi_unix_socket.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_wsgi_unix_socket.cpython-34.pyc new file mode 100644 index 00000000..198b5b5c Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_wsgi_unix_socket.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_wsgi_vhost.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_wsgi_vhost.cpython-34.pyc new file mode 100644 index 00000000..1f5b2b56 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_wsgi_vhost.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_wsgiapps.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_wsgiapps.cpython-34.pyc new file mode 100644 index 00000000..72ef44e1 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/test_wsgiapps.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/test_xmlrpc.cpython-34.pyc b/deps/cherrypy/test/__pycache__/test_xmlrpc.cpython-34.pyc new file mode 100644 index 00000000..0e775641 Binary files /dev/null and 
b/deps/cherrypy/test/__pycache__/test_xmlrpc.cpython-34.pyc differ diff --git a/deps/cherrypy/test/__pycache__/webtest.cpython-34.pyc b/deps/cherrypy/test/__pycache__/webtest.cpython-34.pyc new file mode 100644 index 00000000..9e547e44 Binary files /dev/null and b/deps/cherrypy/test/__pycache__/webtest.cpython-34.pyc differ diff --git a/deps/cherrypy/test/_test_decorators.py b/deps/cherrypy/test/_test_decorators.py new file mode 100644 index 00000000..06fbf0cb --- /dev/null +++ b/deps/cherrypy/test/_test_decorators.py @@ -0,0 +1,40 @@ +"""Test module for the @-decorator syntax, which is version-specific""" + +import cherrypy +from cherrypy import expose, tools +from cherrypy._cpcompat import ntob + + +class ExposeExamples(object): + + @expose + def no_call(self): + return "Mr E. R. Bradshaw" + + @expose() + def call_empty(self): + return "Mrs. B.J. Smegma" + + @expose("call_alias") + def nesbitt(self): + return "Mr Nesbitt" + + @expose(["alias1", "alias2"]) + def andrews(self): + return "Mr Ken Andrews" + + @expose(alias="alias3") + def watson(self): + return "Mr. and Mrs. Watson" + + +class ToolExamples(object): + + @expose + # This is here to demonstrate that using the config decorator + # does not overwrite other config attributes added by the Tool + # decorator (in this case response_headers). 
+ @cherrypy.config(**{'response.stream': True}) + @tools.response_headers(headers=[('Content-Type', 'application/data')]) + def blah(self): + yield ntob("blah") diff --git a/deps/cherrypy/test/_test_states_demo.py b/deps/cherrypy/test/_test_states_demo.py new file mode 100644 index 00000000..75f9405c --- /dev/null +++ b/deps/cherrypy/test/_test_states_demo.py @@ -0,0 +1,68 @@ +import os +import sys +import time +starttime = time.time() + +import cherrypy + + +class Root: + + @cherrypy.expose + def index(self): + return "Hello World" + + @cherrypy.expose + def mtimes(self): + return repr(cherrypy.engine.publish("Autoreloader", "mtimes")) + + @cherrypy.expose + def pid(self): + return str(os.getpid()) + + @cherrypy.expose + def start(self): + return repr(starttime) + + @cherrypy.expose + def exit(self): + # This handler might be called before the engine is STARTED if an + # HTTP worker thread handles it before the HTTP server returns + # control to engine.start. We avoid that race condition here + # by waiting for the Bus to be STARTED. 
+ cherrypy.engine.wait(state=cherrypy.engine.states.STARTED) + cherrypy.engine.exit() + + +def unsub_sig(): + cherrypy.log("unsubsig: %s" % cherrypy.config.get('unsubsig', False)) + if cherrypy.config.get('unsubsig', False): + cherrypy.log("Unsubscribing the default cherrypy signal handler") + cherrypy.engine.signal_handler.unsubscribe() + try: + from signal import signal, SIGTERM + except ImportError: + pass + else: + def old_term_handler(signum=None, frame=None): + cherrypy.log("I am an old SIGTERM handler.") + sys.exit(0) + cherrypy.log("Subscribing the new one.") + signal(SIGTERM, old_term_handler) +cherrypy.engine.subscribe('start', unsub_sig, priority=100) + + +def starterror(): + if cherrypy.config.get('starterror', False): + zerodiv = 1 / 0 +cherrypy.engine.subscribe('start', starterror, priority=6) + + +def log_test_case_name(): + if cherrypy.config.get('test_case_name', False): + cherrypy.log("STARTED FROM: %s" % + cherrypy.config.get('test_case_name')) +cherrypy.engine.subscribe('start', log_test_case_name, priority=6) + + +cherrypy.tree.mount(Root(), '/', {'/': {}}) diff --git a/deps/cherrypy/test/benchmark.py b/deps/cherrypy/test/benchmark.py new file mode 100644 index 00000000..ad8f5588 --- /dev/null +++ b/deps/cherrypy/test/benchmark.py @@ -0,0 +1,421 @@ +"""CherryPy Benchmark Tool + + Usage: + benchmark.py [options] + + --null: use a null Request object (to bench the HTTP server only) + --notests: start the server but do not run the tests; this allows + you to check the tested pages with a browser + --help: show this help message + --cpmodpy: run tests via apache on 54583 (with the builtin _cpmodpy) + --modpython: run tests via apache on 54583 (with modpython_gateway) + --ab=path: Use the ab script/executable at 'path' (see below) + --apache=path: Use the apache script/exe at 'path' (see below) + + To run the benchmarks, the Apache Benchmark tool "ab" must either be on + your system path, or specified via the --ab=path option. 
+ + To run the modpython tests, the "apache" executable or script must be + on your system path, or provided via the --apache=path option. On some + platforms, "apache" may be called "apachectl" or "apache2ctl"--create + a symlink to them if needed. +""" + +import getopt +import os +curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) + +import re +import sys +import time + +import cherrypy +from cherrypy._cpcompat import ntob +from cherrypy import _cperror, _cpmodpy +from cherrypy.lib import httputil + + +AB_PATH = "" +APACHE_PATH = "apache" +SCRIPT_NAME = "/cpbench/users/rdelon/apps/blog" + +__all__ = ['ABSession', 'Root', 'print_report', + 'run_standard_benchmarks', 'safe_threads', + 'size_report', 'startup', 'thread_report', + ] + +size_cache = {} + + +class Root: + + @cherrypy.expose + def index(self): + return """ + + CherryPy Benchmark + + + + +""" + + @cherrypy.expose + def hello(self): + return "Hello, world\r\n" + + @cherrypy.expose + def sizer(self, size): + resp = size_cache.get(size, None) + if resp is None: + size_cache[size] = resp = "X" * int(size) + return resp + + +cherrypy.config.update({ + 'log.error.file': '', + 'environment': 'production', + 'server.socket_host': '127.0.0.1', + 'server.socket_port': 54583, + 'server.max_request_header_size': 0, + 'server.max_request_body_size': 0, + 'engine.timeout_monitor.frequency': 0, +}) + +# Cheat mode on ;) +del cherrypy.config['tools.log_tracebacks.on'] +del cherrypy.config['tools.log_headers.on'] +del cherrypy.config['tools.trailing_slash.on'] + +appconf = { + '/static': { + 'tools.staticdir.on': True, + 'tools.staticdir.dir': 'static', + 'tools.staticdir.root': curdir, + }, +} +app = cherrypy.tree.mount(Root(), SCRIPT_NAME, appconf) + + +class NullRequest: + + """A null HTTP request class, returning 200 and an empty body.""" + + def __init__(self, local, remote, scheme="http"): + pass + + def close(self): + pass + + def run(self, method, path, query_string, protocol, headers, rfile): + 
cherrypy.response.status = "200 OK" + cherrypy.response.header_list = [("Content-Type", 'text/html'), + ("Server", "Null CherryPy"), + ("Date", httputil.HTTPDate()), + ("Content-Length", "0"), + ] + cherrypy.response.body = [""] + return cherrypy.response + + +class NullResponse: + pass + + +class ABSession: + + """A session of 'ab', the Apache HTTP server benchmarking tool. + +Example output from ab: + +This is ApacheBench, Version 2.0.40-dev <$Revision: 1.121.2.1 $> apache-2.0 +Copyright (c) 1996 Adam Twiss, Zeus Technology Ltd, http://www.zeustech.net/ +Copyright (c) 1998-2002 The Apache Software Foundation, http://www.apache.org/ + +Benchmarking 127.0.0.1 (be patient) +Completed 100 requests +Completed 200 requests +Completed 300 requests +Completed 400 requests +Completed 500 requests +Completed 600 requests +Completed 700 requests +Completed 800 requests +Completed 900 requests + + +Server Software: CherryPy/3.1beta +Server Hostname: 127.0.0.1 +Server Port: 54583 + +Document Path: /static/index.html +Document Length: 14 bytes + +Concurrency Level: 10 +Time taken for tests: 9.643867 seconds +Complete requests: 1000 +Failed requests: 0 +Write errors: 0 +Total transferred: 189000 bytes +HTML transferred: 14000 bytes +Requests per second: 103.69 [#/sec] (mean) +Time per request: 96.439 [ms] (mean) +Time per request: 9.644 [ms] (mean, across all concurrent requests) +Transfer rate: 19.08 [Kbytes/sec] received + +Connection Times (ms) + min mean[+/-sd] median max +Connect: 0 0 2.9 0 10 +Processing: 20 94 7.3 90 130 +Waiting: 0 43 28.1 40 100 +Total: 20 95 7.3 100 130 + +Percentage of the requests served within a certain time (ms) + 50% 100 + 66% 100 + 75% 100 + 80% 100 + 90% 100 + 95% 100 + 98% 100 + 99% 110 + 100% 130 (longest request) +Finished 1000 requests +""" + + parse_patterns = [ + ('complete_requests', 'Completed', + ntob(r'^Complete requests:\s*(\d+)')), + ('failed_requests', 'Failed', + ntob(r'^Failed requests:\s*(\d+)')), + ('requests_per_second', 
'req/sec', + ntob(r'^Requests per second:\s*([0-9.]+)')), + ('time_per_request_concurrent', 'msec/req', + ntob(r'^Time per request:\s*([0-9.]+).*concurrent requests\)$')), + ('transfer_rate', 'KB/sec', + ntob(r'^Transfer rate:\s*([0-9.]+)')) + ] + + def __init__(self, path=SCRIPT_NAME + "/hello", requests=1000, + concurrency=10): + self.path = path + self.requests = requests + self.concurrency = concurrency + + def args(self): + port = cherrypy.server.socket_port + assert self.concurrency > 0 + assert self.requests > 0 + # Don't use "localhost". + # Cf + # http://mail.python.org/pipermail/python-win32/2008-March/007050.html + return ("-k -n %s -c %s http://127.0.0.1:%s%s" % + (self.requests, self.concurrency, port, self.path)) + + def run(self): + # Parse output of ab, setting attributes on self + try: + self.output = _cpmodpy.read_process(AB_PATH or "ab", self.args()) + except: + print(_cperror.format_exc()) + raise + + for attr, name, pattern in self.parse_patterns: + val = re.search(pattern, self.output, re.MULTILINE) + if val: + val = val.group(1) + setattr(self, attr, val) + else: + setattr(self, attr, None) + + +safe_threads = (25, 50, 100, 200, 400) +if sys.platform in ("win32",): + # For some reason, ab crashes with > 50 threads on my Win2k laptop. + safe_threads = (10, 20, 30, 40, 50) + + +def thread_report(path=SCRIPT_NAME + "/hello", concurrency=safe_threads): + sess = ABSession(path) + attrs, names, patterns = list(zip(*sess.parse_patterns)) + avg = dict.fromkeys(attrs, 0.0) + + yield ('threads',) + names + for c in concurrency: + sess.concurrency = c + sess.run() + row = [c] + for attr in attrs: + val = getattr(sess, attr) + if val is None: + print(sess.output) + row = None + break + val = float(val) + avg[attr] += float(val) + row.append(val) + if row: + yield row + + # Add a row of averages. 
+ yield ["Average"] + [str(avg[attr] / len(concurrency)) for attr in attrs] + + +def size_report(sizes=(10, 100, 1000, 10000, 100000, 100000000), + concurrency=50): + sess = ABSession(concurrency=concurrency) + attrs, names, patterns = list(zip(*sess.parse_patterns)) + yield ('bytes',) + names + for sz in sizes: + sess.path = "%s/sizer?size=%s" % (SCRIPT_NAME, sz) + sess.run() + yield [sz] + [getattr(sess, attr) for attr in attrs] + + +def print_report(rows): + for row in rows: + print("") + for i, val in enumerate(row): + sys.stdout.write(str(val).rjust(10) + " | ") + print("") + + +def run_standard_benchmarks(): + print("") + print("Client Thread Report (1000 requests, 14 byte response body, " + "%s server threads):" % cherrypy.server.thread_pool) + print_report(thread_report()) + + print("") + print("Client Thread Report (1000 requests, 14 bytes via staticdir, " + "%s server threads):" % cherrypy.server.thread_pool) + print_report(thread_report("%s/static/index.html" % SCRIPT_NAME)) + + print("") + print("Size Report (1000 requests, 50 client threads, " + "%s server threads):" % cherrypy.server.thread_pool) + print_report(size_report()) + + +# modpython and other WSGI # + +def startup_modpython(req=None): + """Start the CherryPy app server in 'serverless' mode (for modpython/WSGI). 
+ """ + if cherrypy.engine.state == cherrypy._cpengine.STOPPED: + if req: + if "nullreq" in req.get_options(): + cherrypy.engine.request_class = NullRequest + cherrypy.engine.response_class = NullResponse + ab_opt = req.get_options().get("ab", "") + if ab_opt: + global AB_PATH + AB_PATH = ab_opt + cherrypy.engine.start() + if cherrypy.engine.state == cherrypy._cpengine.STARTING: + cherrypy.engine.wait() + return 0 # apache.OK + + +def run_modpython(use_wsgi=False): + print("Starting mod_python...") + pyopts = [] + + # Pass the null and ab=path options through Apache + if "--null" in opts: + pyopts.append(("nullreq", "")) + + if "--ab" in opts: + pyopts.append(("ab", opts["--ab"])) + + s = _cpmodpy.ModPythonServer + if use_wsgi: + pyopts.append(("wsgi.application", "cherrypy::tree")) + pyopts.append( + ("wsgi.startup", "cherrypy.test.benchmark::startup_modpython")) + handler = "modpython_gateway::handler" + s = s(port=54583, opts=pyopts, + apache_path=APACHE_PATH, handler=handler) + else: + pyopts.append( + ("cherrypy.setup", "cherrypy.test.benchmark::startup_modpython")) + s = s(port=54583, opts=pyopts, apache_path=APACHE_PATH) + + try: + s.start() + run() + finally: + s.stop() + + +if __name__ == '__main__': + longopts = ['cpmodpy', 'modpython', 'null', 'notests', + 'help', 'ab=', 'apache='] + try: + switches, args = getopt.getopt(sys.argv[1:], "", longopts) + opts = dict(switches) + except getopt.GetoptError: + print(__doc__) + sys.exit(2) + + if "--help" in opts: + print(__doc__) + sys.exit(0) + + if "--ab" in opts: + AB_PATH = opts['--ab'] + + if "--notests" in opts: + # Return without stopping the server, so that the pages + # can be tested from a standard web browser. 
+ def run(): + port = cherrypy.server.socket_port + print("You may now open http://127.0.0.1:%s%s/" % + (port, SCRIPT_NAME)) + + if "--null" in opts: + print("Using null Request object") + else: + def run(): + end = time.time() - start + print("Started in %s seconds" % end) + if "--null" in opts: + print("\nUsing null Request object") + try: + try: + run_standard_benchmarks() + except: + print(_cperror.format_exc()) + raise + finally: + cherrypy.engine.exit() + + print("Starting CherryPy app server...") + + class NullWriter(object): + + """Suppresses the printing of socket errors.""" + + def write(self, data): + pass + sys.stderr = NullWriter() + + start = time.time() + + if "--cpmodpy" in opts: + run_modpython() + elif "--modpython" in opts: + run_modpython(use_wsgi=True) + else: + if "--null" in opts: + cherrypy.server.request_class = NullRequest + cherrypy.server.response_class = NullResponse + + cherrypy.engine.start_with_callback(run) + cherrypy.engine.block() diff --git a/deps/cherrypy/test/checkerdemo.py b/deps/cherrypy/test/checkerdemo.py new file mode 100644 index 00000000..68fb222b --- /dev/null +++ b/deps/cherrypy/test/checkerdemo.py @@ -0,0 +1,48 @@ +"""Demonstration app for cherrypy.checker. + +This application is intentionally broken and badly designed. +To demonstrate the output of the CherryPy Checker, simply execute +this module. +""" + +import os +import cherrypy +thisdir = os.path.dirname(os.path.abspath(__file__)) + + +class Root: + pass + +if __name__ == '__main__': + conf = {'/base': {'tools.staticdir.root': thisdir, + # Obsolete key. + 'throw_errors': True, + }, + # This entry should be OK. + '/base/static': {'tools.staticdir.on': True, + 'tools.staticdir.dir': 'static'}, + # Warn on missing folder. + '/base/js': {'tools.staticdir.on': True, + 'tools.staticdir.dir': 'js'}, + # Warn on dir with an abs path even though we provide root. 
+ '/base/static2': {'tools.staticdir.on': True, + 'tools.staticdir.dir': '/static'}, + # Warn on dir with a relative path with no root. + '/static3': {'tools.staticdir.on': True, + 'tools.staticdir.dir': 'static'}, + # Warn on unknown namespace + '/unknown': {'toobles.gzip.on': True}, + # Warn special on cherrypy..* + '/cpknown': {'cherrypy.tools.encode.on': True}, + # Warn on mismatched types + '/conftype': {'request.show_tracebacks': 14}, + # Warn on unknown tool. + '/web': {'tools.unknown.on': True}, + # Warn on server.* in app config. + '/app1': {'server.socket_host': '0.0.0.0'}, + # Warn on 'localhost' + 'global': {'server.socket_host': 'localhost'}, + # Warn on '[name]' + '[/extra_brackets]': {}, + } + cherrypy.quickstart(Root(), config=conf) diff --git a/deps/cherrypy/test/helper.py b/deps/cherrypy/test/helper.py new file mode 100644 index 00000000..4f0dec78 --- /dev/null +++ b/deps/cherrypy/test/helper.py @@ -0,0 +1,547 @@ +"""A library of helper functions for the CherryPy test suite.""" + +import datetime +import logging +log = logging.getLogger(__name__) +import os +thisdir = os.path.abspath(os.path.dirname(__file__)) +serverpem = os.path.join(os.getcwd(), thisdir, 'test.pem') +import unittest + +import re +import sys +import time +import warnings +import io +import six + +import cherrypy +from cherrypy._cpcompat import text_or_bytes, copyitems, HTTPSConnection, ntob +from cherrypy.lib import httputil +from cherrypy.lib import gctools +from cherrypy.lib.reprconf import unrepr +from cherrypy.test import webtest + +# Use subprocess module from Python 2.7 on Python 2.3-2.6 +if sys.version_info < (2, 7): + import cherrypy._cpcompat_subprocess as subprocess +else: + import subprocess + +import nose + +_testconfig = None + + +def get_tst_config(overconf={}): + global _testconfig + if _testconfig is None: + conf = { + 'scheme': 'http', + 'protocol': "HTTP/1.1", + 'port': 54583, + 'host': '127.0.0.1', + 'validate': False, + 'server': 'wsgi', + } + try: + import 
testconfig + _conf = testconfig.config.get('supervisor', None) + if _conf is not None: + for k, v in _conf.items(): + if isinstance(v, text_or_bytes): + _conf[k] = unrepr(v) + conf.update(_conf) + except ImportError: + pass + _testconfig = conf + conf = _testconfig.copy() + conf.update(overconf) + + return conf + + +class Supervisor(object): + + """Base class for modeling and controlling servers during testing.""" + + def __init__(self, **kwargs): + for k, v in kwargs.items(): + if k == 'port': + setattr(self, k, int(v)) + setattr(self, k, v) + + +log_to_stderr = lambda msg, level: sys.stderr.write(msg + os.linesep) + + +class LocalSupervisor(Supervisor): + + """Base class for modeling/controlling servers which run in the same + process. + + When the server side runs in a different process, start/stop can dump all + state between each test module easily. When the server side runs in the + same process as the client, however, we have to do a bit more work to + ensure config and mounted apps are reset between tests. + """ + + using_apache = False + using_wsgi = False + + def __init__(self, **kwargs): + for k, v in kwargs.items(): + setattr(self, k, v) + + cherrypy.server.httpserver = self.httpserver_class + + # This is perhaps the wrong place for this call but this is the only + # place that i've found so far that I KNOW is early enough to set this. + cherrypy.config.update({'log.screen': False}) + engine = cherrypy.engine + if hasattr(engine, "signal_handler"): + engine.signal_handler.subscribe() + if hasattr(engine, "console_control_handler"): + engine.console_control_handler.subscribe() + #engine.subscribe('log', log_to_stderr) + + def start(self, modulename=None): + """Load and start the HTTP server.""" + if modulename: + # Unhook httpserver so cherrypy.server.start() creates a new + # one (with config from setup_server, if declared). 
+ cherrypy.server.httpserver = None + + cherrypy.engine.start() + + self.sync_apps() + + def sync_apps(self): + """Tell the server about any apps which the setup functions mounted.""" + pass + + def stop(self): + td = getattr(self, 'teardown', None) + if td: + td() + + cherrypy.engine.exit() + + for name, server in copyitems(getattr(cherrypy, 'servers', {})): + server.unsubscribe() + del cherrypy.servers[name] + + +class NativeServerSupervisor(LocalSupervisor): + + """Server supervisor for the builtin HTTP server.""" + + httpserver_class = "cherrypy._cpnative_server.CPHTTPServer" + using_apache = False + using_wsgi = False + + def __str__(self): + return "Builtin HTTP Server on %s:%s" % (self.host, self.port) + + +class LocalWSGISupervisor(LocalSupervisor): + + """Server supervisor for the builtin WSGI server.""" + + httpserver_class = "cherrypy._cpwsgi_server.CPWSGIServer" + using_apache = False + using_wsgi = True + + def __str__(self): + return "Builtin WSGI Server on %s:%s" % (self.host, self.port) + + def sync_apps(self): + """Hook a new WSGI app into the origin server.""" + cherrypy.server.httpserver.wsgi_app = self.get_app() + + def get_app(self, app=None): + """Obtain a new (decorated) WSGI app to hook into the origin server.""" + if app is None: + app = cherrypy.tree + + if self.validate: + try: + from wsgiref import validate + except ImportError: + warnings.warn( + "Error importing wsgiref. 
The validator will not run.") + else: + # wraps the app in the validator + app = validate.validator(app) + + return app + + +def get_cpmodpy_supervisor(**options): + from cherrypy.test import modpy + sup = modpy.ModPythonSupervisor(**options) + sup.template = modpy.conf_cpmodpy + return sup + + +def get_modpygw_supervisor(**options): + from cherrypy.test import modpy + sup = modpy.ModPythonSupervisor(**options) + sup.template = modpy.conf_modpython_gateway + sup.using_wsgi = True + return sup + + +def get_modwsgi_supervisor(**options): + from cherrypy.test import modwsgi + return modwsgi.ModWSGISupervisor(**options) + + +def get_modfcgid_supervisor(**options): + from cherrypy.test import modfcgid + return modfcgid.ModFCGISupervisor(**options) + + +def get_modfastcgi_supervisor(**options): + from cherrypy.test import modfastcgi + return modfastcgi.ModFCGISupervisor(**options) + + +def get_wsgi_u_supervisor(**options): + cherrypy.server.wsgi_version = ('u', 0) + return LocalWSGISupervisor(**options) + + +class CPWebCase(webtest.WebCase): + + script_name = "" + scheme = "http" + + available_servers = {'wsgi': LocalWSGISupervisor, + 'wsgi_u': get_wsgi_u_supervisor, + 'native': NativeServerSupervisor, + 'cpmodpy': get_cpmodpy_supervisor, + 'modpygw': get_modpygw_supervisor, + 'modwsgi': get_modwsgi_supervisor, + 'modfcgid': get_modfcgid_supervisor, + 'modfastcgi': get_modfastcgi_supervisor, + } + default_server = "wsgi" + + @classmethod + def _setup_server(cls, supervisor, conf): + v = sys.version.split()[0] + log.info("Python version used to run this test script: %s" % v) + log.info("CherryPy version: %s" % cherrypy.__version__) + if supervisor.scheme == "https": + ssl = " (ssl)" + else: + ssl = "" + log.info("HTTP server version: %s%s" % (supervisor.protocol, ssl)) + log.info("PID: %s" % os.getpid()) + + cherrypy.server.using_apache = supervisor.using_apache + cherrypy.server.using_wsgi = supervisor.using_wsgi + + if sys.platform[:4] == 'java': + 
cherrypy.config.update({'server.nodelay': False}) + + if isinstance(conf, text_or_bytes): + parser = cherrypy.lib.reprconf.Parser() + conf = parser.dict_from_file(conf).get('global', {}) + else: + conf = conf or {} + baseconf = conf.copy() + baseconf.update({'server.socket_host': supervisor.host, + 'server.socket_port': supervisor.port, + 'server.protocol_version': supervisor.protocol, + 'environment': "test_suite", + }) + if supervisor.scheme == "https": + #baseconf['server.ssl_module'] = 'builtin' + baseconf['server.ssl_certificate'] = serverpem + baseconf['server.ssl_private_key'] = serverpem + + # helper must be imported lazily so the coverage tool + # can run against module-level statements within cherrypy. + # Also, we have to do "from cherrypy.test import helper", + # exactly like each test module does, because a relative import + # would stick a second instance of webtest in sys.modules, + # and we wouldn't be able to globally override the port anymore. + if supervisor.scheme == "https": + webtest.WebCase.HTTP_CONN = HTTPSConnection + return baseconf + + @classmethod + def setup_class(cls): + '' + # Creates a server + conf = get_tst_config() + supervisor_factory = cls.available_servers.get( + conf.get('server', 'wsgi')) + if supervisor_factory is None: + raise RuntimeError('Unknown server in config: %s' % conf['server']) + supervisor = supervisor_factory(**conf) + + # Copied from "run_test_suite" + cherrypy.config.reset() + baseconf = cls._setup_server(supervisor, conf) + cherrypy.config.update(baseconf) + setup_client() + + if hasattr(cls, 'setup_server'): + # Clear the cherrypy tree and clear the wsgi server so that + # it can be updated with the new root + cherrypy.tree = cherrypy._cptree.Tree() + cherrypy.server.httpserver = None + cls.setup_server() + # Add a resource for verifying there are no refleaks + # to *every* test class. 
+ cherrypy.tree.mount(gctools.GCRoot(), '/gc') + cls.do_gc_test = True + supervisor.start(cls.__module__) + + cls.supervisor = supervisor + + @classmethod + def teardown_class(cls): + '' + if hasattr(cls, 'setup_server'): + cls.supervisor.stop() + + do_gc_test = False + + def test_gc(self): + if not self.do_gc_test: + return + + self.getPage("/gc/stats") + try: + self.assertBody("Statistics:") + except Exception: + "Failures occur intermittently. See #1420" + + def prefix(self): + return self.script_name.rstrip("/") + + def base(self): + if ((self.scheme == "http" and self.PORT == 80) or + (self.scheme == "https" and self.PORT == 443)): + port = "" + else: + port = ":%s" % self.PORT + + return "%s://%s%s%s" % (self.scheme, self.HOST, port, + self.script_name.rstrip("/")) + + def exit(self): + sys.exit() + + def getPage(self, url, headers=None, method="GET", body=None, + protocol=None, raise_subcls=None): + """Open the url. Return status, headers, body. + + `raise_subcls` must be a tuple with the exceptions classes + or a single exception class that are not going to be considered + a socket.error regardless that they were are subclass of a + socket.error and therefore not considered for a connection retry. + """ + if self.script_name: + url = httputil.urljoin(self.script_name, url) + return webtest.WebCase.getPage(self, url, headers, method, body, + protocol, raise_subcls) + + def skip(self, msg='skipped '): + raise nose.SkipTest(msg) + + def assertErrorPage(self, status, message=None, pattern=''): + """Compare the response body with a built in error page. + + The function will optionally look for the regexp pattern, + within the exception embedded in the error page.""" + + # This will never contain a traceback + page = cherrypy._cperror.get_error_page(status, message=message) + + # First, test the response body without checking the traceback. + # Stick a match-all group (.*) in to grab the traceback. 
+ def esc(text): + return re.escape(ntob(text)) + epage = re.escape(page) + epage = epage.replace( + esc('
<pre id="traceback"></pre>'),
+            esc('<pre id="traceback">') + ntob('(.*)') + esc('</pre>
')) + m = re.match(epage, self.body, re.DOTALL) + if not m: + self._handlewebError( + 'Error page does not match; expected:\n' + page) + return + + # Now test the pattern against the traceback + if pattern is None: + # Special-case None to mean that there should be *no* traceback. + if m and m.group(1): + self._handlewebError('Error page contains traceback') + else: + if (m is None) or ( + not re.search(ntob(re.escape(pattern), self.encoding), + m.group(1))): + msg = 'Error page does not contain %s in traceback' + self._handlewebError(msg % repr(pattern)) + + date_tolerance = 2 + + def assertEqualDates(self, dt1, dt2, seconds=None): + """Assert abs(dt1 - dt2) is within Y seconds.""" + if seconds is None: + seconds = self.date_tolerance + + if dt1 > dt2: + diff = dt1 - dt2 + else: + diff = dt2 - dt1 + if not diff < datetime.timedelta(seconds=seconds): + raise AssertionError('%r and %r are not within %r seconds.' % + (dt1, dt2, seconds)) + + +def _test_method_sorter(_, x, y): + """Monkeypatch the test sorter to always run test_gc last in each suite.""" + if x == 'test_gc': + return 1 + if y == 'test_gc': + return -1 + if x > y: + return 1 + if x < y: + return -1 + return 0 +unittest.TestLoader.sortTestMethodsUsing = _test_method_sorter + + +def setup_client(): + """Set up the WebCase classes to match the server's socket settings.""" + webtest.WebCase.PORT = cherrypy.server.socket_port + webtest.WebCase.HOST = cherrypy.server.socket_host + if cherrypy.server.ssl_certificate: + CPWebCase.scheme = 'https' + +# --------------------------- Spawning helpers --------------------------- # + + +class CPProcess(object): + + pid_file = os.path.join(thisdir, 'test.pid') + config_file = os.path.join(thisdir, 'test.conf') + config_template = """[global] +server.socket_host: '%(host)s' +server.socket_port: %(port)s +checker.on: False +log.screen: False +log.error_file: r'%(error_log)s' +log.access_file: r'%(access_log)s' +%(ssl)s +%(extra)s +""" + error_log = os.path.join(thisdir, 
'test.error.log') + access_log = os.path.join(thisdir, 'test.access.log') + + def __init__(self, wait=False, daemonize=False, ssl=False, + socket_host=None, socket_port=None): + self.wait = wait + self.daemonize = daemonize + self.ssl = ssl + self.host = socket_host or cherrypy.server.socket_host + self.port = socket_port or cherrypy.server.socket_port + + def write_conf(self, extra=""): + if self.ssl: + serverpem = os.path.join(thisdir, 'test.pem') + ssl = """ +server.ssl_certificate: r'%s' +server.ssl_private_key: r'%s' +""" % (serverpem, serverpem) + else: + ssl = "" + + conf = self.config_template % { + 'host': self.host, + 'port': self.port, + 'error_log': self.error_log, + 'access_log': self.access_log, + 'ssl': ssl, + 'extra': extra, + } + with io.open(self.config_file, 'w', encoding='utf-8') as f: + f.write(six.text_type(conf)) + + def start(self, imports=None): + """Start cherryd in a subprocess.""" + cherrypy._cpserver.wait_for_free_port(self.host, self.port) + + args = [ + os.path.join(thisdir, '..', 'cherryd'), + '-c', self.config_file, + '-p', self.pid_file, + ] + + if not isinstance(imports, (list, tuple)): + imports = [imports] + for i in imports: + if i: + args.append('-i') + args.append(i) + + if self.daemonize: + args.append('-d') + + env = os.environ.copy() + # Make sure we import the cherrypy package in which this module is + # defined. 
+ grandparentdir = os.path.abspath(os.path.join(thisdir, '..', '..')) + if env.get('PYTHONPATH', ''): + env['PYTHONPATH'] = os.pathsep.join( + (grandparentdir, env['PYTHONPATH'])) + else: + env['PYTHONPATH'] = grandparentdir + self._proc = subprocess.Popen([sys.executable] + args, env=env) + if self.wait: + self.exit_code = self._proc.wait() + else: + cherrypy._cpserver.wait_for_occupied_port(self.host, self.port) + + # Give the engine a wee bit more time to finish STARTING + if self.daemonize: + time.sleep(2) + else: + time.sleep(1) + + def get_pid(self): + if self.daemonize: + return int(open(self.pid_file, 'rb').read()) + return self._proc.pid + + def join(self): + """Wait for the process to exit.""" + if self.daemonize: + return self._join_daemon() + self._proc.wait() + + def _join_daemon(self): + try: + try: + # Mac, UNIX + os.wait() + except AttributeError: + # Windows + try: + pid = self.get_pid() + except IOError: + # Assume the subprocess deleted the pidfile on shutdown. + pass + else: + os.waitpid(pid, 0) + except OSError: + x = sys.exc_info()[1] + if x.args != (10, 'No child processes'): + raise diff --git a/deps/cherrypy/test/logtest.py b/deps/cherrypy/test/logtest.py new file mode 100644 index 00000000..27feac86 --- /dev/null +++ b/deps/cherrypy/test/logtest.py @@ -0,0 +1,200 @@ +"""logtest, a unittest.TestCase helper for testing log output.""" + +import sys +import time + +import six + +from cherrypy._cpcompat import text_or_bytes, ntob + + +try: + # On Windows, msvcrt.getch reads a single char without output. + import msvcrt + + def getchar(): + return msvcrt.getch() +except ImportError: + # Unix getchr + import tty + import termios + + def getchar(): + fd = sys.stdin.fileno() + old_settings = termios.tcgetattr(fd) + try: + tty.setraw(sys.stdin.fileno()) + ch = sys.stdin.read(1) + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + return ch + + +class LogCase(object): + + """unittest.TestCase mixin for testing log messages. 
+ + logfile: a filename for the desired log. Yes, I know modes are evil, + but it makes the test functions so much cleaner to set this once. + + lastmarker: the last marker in the log. This can be used to search for + messages since the last marker. + + markerPrefix: a string with which to prefix log markers. This should be + unique enough from normal log output to use for marker identification. + """ + + logfile = None + lastmarker = None + markerPrefix = ntob("test suite marker: ") + + def _handleLogError(self, msg, data, marker, pattern): + print("") + print(" ERROR: %s" % msg) + + if not self.interactive: + raise self.failureException(msg) + + p = (" Show: " + "[L]og [M]arker [P]attern; " + "[I]gnore, [R]aise, or sys.e[X]it >> ") + sys.stdout.write(p + ' ') + # ARGH + sys.stdout.flush() + while True: + i = getchar().upper() + if i not in "MPLIRX": + continue + print(i.upper()) # Also prints new line + if i == "L": + for x, line in enumerate(data): + if (x + 1) % self.console_height == 0: + # The \r and comma should make the next line overwrite + sys.stdout.write("<-- More -->\r ") + m = getchar().lower() + # Erase our "More" prompt + sys.stdout.write(" \r ") + if m == "q": + break + print(line.rstrip()) + elif i == "M": + print(repr(marker or self.lastmarker)) + elif i == "P": + print(repr(pattern)) + elif i == "I": + # return without raising the normal exception + return + elif i == "R": + raise self.failureException(msg) + elif i == "X": + self.exit() + sys.stdout.write(p + ' ') + + def exit(self): + sys.exit() + + def emptyLog(self): + """Overwrite self.logfile with 0 bytes.""" + open(self.logfile, 'wb').write("") + + def markLog(self, key=None): + """Insert a marker line into the log and set self.lastmarker.""" + if key is None: + key = str(time.time()) + self.lastmarker = key + + open(self.logfile, 'ab+').write( + ntob("%s%s\n" % (self.markerPrefix, key), "utf-8")) + + def _read_marked_region(self, marker=None): + """Return lines from self.logfile in the 
marked region. + + If marker is None, self.lastmarker is used. If the log hasn't + been marked (using self.markLog), the entire log will be returned. + """ +# Give the logger time to finish writing? +# time.sleep(0.5) + + logfile = self.logfile + marker = marker or self.lastmarker + if marker is None: + return open(logfile, 'rb').readlines() + + if isinstance(marker, six.text_type): + marker = marker.encode('utf-8') + data = [] + in_region = False + for line in open(logfile, 'rb'): + if in_region: + if (line.startswith(self.markerPrefix) and not marker in line): + break + else: + data.append(line) + elif marker in line: + in_region = True + return data + + def assertInLog(self, line, marker=None): + """Fail if the given (partial) line is not in the log. + + The log will be searched from the given marker to the next marker. + If marker is None, self.lastmarker is used. If the log hasn't + been marked (using self.markLog), the entire log will be searched. + """ + data = self._read_marked_region(marker) + for logline in data: + if line in logline: + return + msg = "%r not found in log" % line + self._handleLogError(msg, data, marker, line) + + def assertNotInLog(self, line, marker=None): + """Fail if the given (partial) line is in the log. + + The log will be searched from the given marker to the next marker. + If marker is None, self.lastmarker is used. If the log hasn't + been marked (using self.markLog), the entire log will be searched. + """ + data = self._read_marked_region(marker) + for logline in data: + if line in logline: + msg = "%r found in log" % line + self._handleLogError(msg, data, marker, line) + + def assertLog(self, sliceargs, lines, marker=None): + """Fail if log.readlines()[sliceargs] is not contained in 'lines'. + + The log will be searched from the given marker to the next marker. + If marker is None, self.lastmarker is used. If the log hasn't + been marked (using self.markLog), the entire log will be searched. 
+ """ + data = self._read_marked_region(marker) + if isinstance(sliceargs, int): + # Single arg. Use __getitem__ and allow lines to be str or list. + if isinstance(lines, (tuple, list)): + lines = lines[0] + if isinstance(lines, six.text_type): + lines = lines.encode('utf-8') + if lines not in data[sliceargs]: + msg = "%r not found on log line %r" % (lines, sliceargs) + self._handleLogError( + msg, + [data[sliceargs], "--EXTRA CONTEXT--"] + data[ + sliceargs + 1:sliceargs + 6], + marker, + lines) + else: + # Multiple args. Use __getslice__ and require lines to be list. + if isinstance(lines, tuple): + lines = list(lines) + elif isinstance(lines, text_or_bytes): + raise TypeError("The 'lines' arg must be a list when " + "'sliceargs' is a tuple.") + + start, stop = sliceargs + for line, logline in zip(lines, data[start:stop]): + if isinstance(line, six.text_type): + line = line.encode('utf-8') + if line not in logline: + msg = "%r not found in log" % line + self._handleLogError(msg, data[start:stop], marker, line) diff --git a/deps/cherrypy/test/modfastcgi.py b/deps/cherrypy/test/modfastcgi.py new file mode 100644 index 00000000..6d0e00c6 --- /dev/null +++ b/deps/cherrypy/test/modfastcgi.py @@ -0,0 +1,135 @@ +"""Wrapper for mod_fastcgi, for use as a CherryPy HTTP server when testing. + +To autostart fastcgi, the "apache" executable or script must be +on your system path, or you must override the global APACHE_PATH. +On some platforms, "apache" may be called "apachectl", "apache2ctl", +or "httpd"--create a symlink to them if needed. + +You'll also need the WSGIServer from flup.servers. +See http://projects.amor.org/misc/wiki/ModPythonGateway + + +KNOWN BUGS +========== + +1. Apache processes Range headers automatically; CherryPy's truncated + output is then truncated again by Apache. See test_core.testRanges. + This was worked around in http://www.cherrypy.org/changeset/1319. +2. Apache does not allow custom HTTP methods like CONNECT as per the spec. 
+ See test_core.testHTTPMethods. +3. Max request header and body settings do not work with Apache. +4. Apache replaces status "reason phrases" automatically. For example, + CherryPy may set "304 Not modified" but Apache will write out + "304 Not Modified" (capital "M"). +5. Apache does not allow custom error codes as per the spec. +6. Apache (or perhaps modpython, or modpython_gateway) unquotes %xx in the + Request-URI too early. +7. mod_python will not read request bodies which use the "chunked" + transfer-coding (it passes REQUEST_CHUNKED_ERROR to ap_setup_client_block + instead of REQUEST_CHUNKED_DECHUNK, see Apache2's http_protocol.c and + mod_python's requestobject.c). +8. Apache will output a "Content-Length: 0" response header even if there's + no response entity body. This isn't really a bug; it just differs from + the CherryPy default. +""" + +import os +curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) +import re + +import cherrypy +from cherrypy.process import servers +from cherrypy.test import helper + + +def read_process(cmd, args=""): + pipein, pipeout = os.popen4("%s %s" % (cmd, args)) + try: + firstline = pipeout.readline() + if (re.search(r"(not recognized|No such file|not found)", firstline, + re.IGNORECASE)): + raise IOError('%s must be on your system path.' % cmd) + output = firstline + pipeout.read() + finally: + pipeout.close() + return output + + +APACHE_PATH = "apache2ctl" +CONF_PATH = "fastcgi.conf" + +conf_fastcgi = """ +# Apache2 server conf file for testing CherryPy with mod_fastcgi. 
+# fumanchu: I had to hard-code paths due to crazy Debian layouts :( +ServerRoot /usr/lib/apache2 +User #1000 +ErrorLog %(root)s/mod_fastcgi.error.log + +DocumentRoot "%(root)s" +ServerName 127.0.0.1 +Listen %(port)s +LoadModule fastcgi_module modules/mod_fastcgi.so +LoadModule rewrite_module modules/mod_rewrite.so + +Options +ExecCGI +SetHandler fastcgi-script +RewriteEngine On +RewriteRule ^(.*)$ /fastcgi.pyc [L] +FastCgiExternalServer "%(server)s" -host 127.0.0.1:4000 +""" + + +def erase_script_name(environ, start_response): + environ['SCRIPT_NAME'] = '' + return cherrypy.tree(environ, start_response) + + +class ModFCGISupervisor(helper.LocalWSGISupervisor): + + httpserver_class = "cherrypy.process.servers.FlupFCGIServer" + using_apache = True + using_wsgi = True + template = conf_fastcgi + + def __str__(self): + return "FCGI Server on %s:%s" % (self.host, self.port) + + def start(self, modulename): + cherrypy.server.httpserver = servers.FlupFCGIServer( + application=erase_script_name, bindAddress=('127.0.0.1', 4000)) + cherrypy.server.httpserver.bind_addr = ('127.0.0.1', 4000) + cherrypy.server.socket_port = 4000 + # For FCGI, we both start apache... + self.start_apache() + # ...and our local server + cherrypy.engine.start() + self.sync_apps() + + def start_apache(self): + fcgiconf = CONF_PATH + if not os.path.isabs(fcgiconf): + fcgiconf = os.path.join(curdir, fcgiconf) + + # Write the Apache conf file. 
+ f = open(fcgiconf, 'wb') + try: + server = repr(os.path.join(curdir, 'fastcgi.pyc'))[1:-1] + output = self.template % {'port': self.port, 'root': curdir, + 'server': server} + output = output.replace('\r\n', '\n') + f.write(output) + finally: + f.close() + + result = read_process(APACHE_PATH, "-k start -f %s" % fcgiconf) + if result: + print(result) + + def stop(self): + """Gracefully shutdown a server that is serving forever.""" + read_process(APACHE_PATH, "-k stop") + helper.LocalWSGISupervisor.stop(self) + + def sync_apps(self): + cherrypy.server.httpserver.fcgiserver.application = self.get_app( + erase_script_name) diff --git a/deps/cherrypy/test/modfcgid.py b/deps/cherrypy/test/modfcgid.py new file mode 100644 index 00000000..fef9c6ef --- /dev/null +++ b/deps/cherrypy/test/modfcgid.py @@ -0,0 +1,123 @@ +"""Wrapper for mod_fcgid, for use as a CherryPy HTTP server when testing. + +To autostart fcgid, the "apache" executable or script must be +on your system path, or you must override the global APACHE_PATH. +On some platforms, "apache" may be called "apachectl", "apache2ctl", +or "httpd"--create a symlink to them if needed. + +You'll also need the WSGIServer from flup.servers. +See http://projects.amor.org/misc/wiki/ModPythonGateway + + +KNOWN BUGS +========== + +1. Apache processes Range headers automatically; CherryPy's truncated + output is then truncated again by Apache. See test_core.testRanges. + This was worked around in http://www.cherrypy.org/changeset/1319. +2. Apache does not allow custom HTTP methods like CONNECT as per the spec. + See test_core.testHTTPMethods. +3. Max request header and body settings do not work with Apache. +4. Apache replaces status "reason phrases" automatically. For example, + CherryPy may set "304 Not modified" but Apache will write out + "304 Not Modified" (capital "M"). +5. Apache does not allow custom error codes as per the spec. +6. 
Apache (or perhaps modpython, or modpython_gateway) unquotes %xx in the + Request-URI too early. +7. mod_python will not read request bodies which use the "chunked" + transfer-coding (it passes REQUEST_CHUNKED_ERROR to ap_setup_client_block + instead of REQUEST_CHUNKED_DECHUNK, see Apache2's http_protocol.c and + mod_python's requestobject.c). +8. Apache will output a "Content-Length: 0" response header even if there's + no response entity body. This isn't really a bug; it just differs from + the CherryPy default. +""" + +import os +curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) +import re + +import cherrypy +from cherrypy._cpcompat import ntob +from cherrypy.process import servers +from cherrypy.test import helper + + +def read_process(cmd, args=""): + pipein, pipeout = os.popen4("%s %s" % (cmd, args)) + try: + firstline = pipeout.readline() + if (re.search(r"(not recognized|No such file|not found)", firstline, + re.IGNORECASE)): + raise IOError('%s must be on your system path.' % cmd) + output = firstline + pipeout.read() + finally: + pipeout.close() + return output + + +APACHE_PATH = "httpd" +CONF_PATH = "fcgi.conf" + +conf_fcgid = """ +# Apache2 server conf file for testing CherryPy with mod_fcgid. 
+ +DocumentRoot "%(root)s" +ServerName 127.0.0.1 +Listen %(port)s +LoadModule fastcgi_module modules/mod_fastcgi.dll +LoadModule rewrite_module modules/mod_rewrite.so + +Options ExecCGI +SetHandler fastcgi-script +RewriteEngine On +RewriteRule ^(.*)$ /fastcgi.pyc [L] +FastCgiExternalServer "%(server)s" -host 127.0.0.1:4000 +""" + + +class ModFCGISupervisor(helper.LocalSupervisor): + + using_apache = True + using_wsgi = True + template = conf_fcgid + + def __str__(self): + return "FCGI Server on %s:%s" % (self.host, self.port) + + def start(self, modulename): + cherrypy.server.httpserver = servers.FlupFCGIServer( + application=cherrypy.tree, bindAddress=('127.0.0.1', 4000)) + cherrypy.server.httpserver.bind_addr = ('127.0.0.1', 4000) + # For FCGI, we both start apache... + self.start_apache() + # ...and our local server + helper.LocalServer.start(self, modulename) + + def start_apache(self): + fcgiconf = CONF_PATH + if not os.path.isabs(fcgiconf): + fcgiconf = os.path.join(curdir, fcgiconf) + + # Write the Apache conf file. + f = open(fcgiconf, 'wb') + try: + server = repr(os.path.join(curdir, 'fastcgi.pyc'))[1:-1] + output = self.template % {'port': self.port, 'root': curdir, + 'server': server} + output = ntob(output.replace('\r\n', '\n')) + f.write(output) + finally: + f.close() + + result = read_process(APACHE_PATH, "-k start -f %s" % fcgiconf) + if result: + print(result) + + def stop(self): + """Gracefully shutdown a server that is serving forever.""" + read_process(APACHE_PATH, "-k stop") + helper.LocalServer.stop(self) + + def sync_apps(self): + cherrypy.server.httpserver.fcgiserver.application = self.get_app() diff --git a/deps/cherrypy/test/modpy.py b/deps/cherrypy/test/modpy.py new file mode 100644 index 00000000..45d19062 --- /dev/null +++ b/deps/cherrypy/test/modpy.py @@ -0,0 +1,164 @@ +"""Wrapper for mod_python, for use as a CherryPy HTTP server when testing. 
+ +To autostart modpython, the "apache" executable or script must be +on your system path, or you must override the global APACHE_PATH. +On some platforms, "apache" may be called "apachectl" or "apache2ctl"-- +create a symlink to them if needed. + +If you wish to test the WSGI interface instead of our _cpmodpy interface, +you also need the 'modpython_gateway' module at: +http://projects.amor.org/misc/wiki/ModPythonGateway + + +KNOWN BUGS +========== + +1. Apache processes Range headers automatically; CherryPy's truncated + output is then truncated again by Apache. See test_core.testRanges. + This was worked around in http://www.cherrypy.org/changeset/1319. +2. Apache does not allow custom HTTP methods like CONNECT as per the spec. + See test_core.testHTTPMethods. +3. Max request header and body settings do not work with Apache. +4. Apache replaces status "reason phrases" automatically. For example, + CherryPy may set "304 Not modified" but Apache will write out + "304 Not Modified" (capital "M"). +5. Apache does not allow custom error codes as per the spec. +6. Apache (or perhaps modpython, or modpython_gateway) unquotes %xx in the + Request-URI too early. +7. mod_python will not read request bodies which use the "chunked" + transfer-coding (it passes REQUEST_CHUNKED_ERROR to ap_setup_client_block + instead of REQUEST_CHUNKED_DECHUNK, see Apache2's http_protocol.c and + mod_python's requestobject.c). +8. Apache will output a "Content-Length: 0" response header even if there's + no response entity body. This isn't really a bug; it just differs from + the CherryPy default. +""" + +import os +curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) +import re + +from cherrypy.test import helper + + +def read_process(cmd, args=""): + pipein, pipeout = os.popen4("%s %s" % (cmd, args)) + try: + firstline = pipeout.readline() + if (re.search(r"(not recognized|No such file|not found)", firstline, + re.IGNORECASE)): + raise IOError('%s must be on your system path.' 
% cmd) + output = firstline + pipeout.read() + finally: + pipeout.close() + return output + + +APACHE_PATH = "httpd" +CONF_PATH = "test_mp.conf" + +conf_modpython_gateway = """ +# Apache2 server conf file for testing CherryPy with modpython_gateway. + +ServerName 127.0.0.1 +DocumentRoot "/" +Listen %(port)s +LoadModule python_module modules/mod_python.so + +SetHandler python-program +PythonFixupHandler cherrypy.test.modpy::wsgisetup +PythonOption testmod %(modulename)s +PythonHandler modpython_gateway::handler +PythonOption wsgi.application cherrypy::tree +PythonOption socket_host %(host)s +PythonDebug On +""" + +conf_cpmodpy = """ +# Apache2 server conf file for testing CherryPy with _cpmodpy. + +ServerName 127.0.0.1 +DocumentRoot "/" +Listen %(port)s +LoadModule python_module modules/mod_python.so + +SetHandler python-program +PythonFixupHandler cherrypy.test.modpy::cpmodpysetup +PythonHandler cherrypy._cpmodpy::handler +PythonOption cherrypy.setup cherrypy.test.%(modulename)s::setup_server +PythonOption socket_host %(host)s +PythonDebug On +""" + + +class ModPythonSupervisor(helper.Supervisor): + + using_apache = True + using_wsgi = False + template = None + + def __str__(self): + return "ModPython Server on %s:%s" % (self.host, self.port) + + def start(self, modulename): + mpconf = CONF_PATH + if not os.path.isabs(mpconf): + mpconf = os.path.join(curdir, mpconf) + + f = open(mpconf, 'wb') + try: + f.write(self.template % + {'port': self.port, 'modulename': modulename, + 'host': self.host}) + finally: + f.close() + + result = read_process(APACHE_PATH, "-k start -f %s" % mpconf) + if result: + print(result) + + def stop(self): + """Gracefully shutdown a server that is serving forever.""" + read_process(APACHE_PATH, "-k stop") + + +loaded = False + + +def wsgisetup(req): + global loaded + if not loaded: + loaded = True + options = req.get_options() + + import cherrypy + cherrypy.config.update({ + "log.error_file": os.path.join(curdir, "test.log"), + "environment": 
"test_suite", + "server.socket_host": options['socket_host'], + }) + + modname = options['testmod'] + mod = __import__(modname, globals(), locals(), ['']) + mod.setup_server() + + cherrypy.server.unsubscribe() + cherrypy.engine.start() + from mod_python import apache + return apache.OK + + +def cpmodpysetup(req): + global loaded + if not loaded: + loaded = True + options = req.get_options() + + import cherrypy + cherrypy.config.update({ + "log.error_file": os.path.join(curdir, "test.log"), + "environment": "test_suite", + "server.socket_host": options['socket_host'], + }) + from mod_python import apache + return apache.OK diff --git a/deps/cherrypy/test/modwsgi.py b/deps/cherrypy/test/modwsgi.py new file mode 100644 index 00000000..043fb6e8 --- /dev/null +++ b/deps/cherrypy/test/modwsgi.py @@ -0,0 +1,150 @@ +"""Wrapper for mod_wsgi, for use as a CherryPy HTTP server. + +To autostart modwsgi, the "apache" executable or script must be +on your system path, or you must override the global APACHE_PATH. +On some platforms, "apache" may be called "apachectl" or "apache2ctl"-- +create a symlink to them if needed. + + +KNOWN BUGS +========== + +##1. Apache processes Range headers automatically; CherryPy's truncated +## output is then truncated again by Apache. See test_core.testRanges. +## This was worked around in http://www.cherrypy.org/changeset/1319. +2. Apache does not allow custom HTTP methods like CONNECT as per the spec. + See test_core.testHTTPMethods. +3. Max request header and body settings do not work with Apache. +##4. Apache replaces status "reason phrases" automatically. For example, +## CherryPy may set "304 Not modified" but Apache will write out +## "304 Not Modified" (capital "M"). +##5. Apache does not allow custom error codes as per the spec. +##6. Apache (or perhaps modpython, or modpython_gateway) unquotes %xx in the +## Request-URI too early. +7. 
mod_wsgi will not read request bodies which use the "chunked" + transfer-coding (it passes REQUEST_CHUNKED_ERROR to ap_setup_client_block + instead of REQUEST_CHUNKED_DECHUNK, see Apache2's http_protocol.c and + mod_python's requestobject.c). +8. When responding with 204 No Content, mod_wsgi adds a Content-Length + header for you. +9. When an error is raised, mod_wsgi has no facility for printing a + traceback as the response content (it's sent to the Apache log instead). +10. Startup and shutdown of Apache when running mod_wsgi seems slow. +""" + +import os +curdir = os.path.abspath(os.path.dirname(__file__)) +import re +import sys +import time + +import cherrypy +from cherrypy.test import helper, webtest + + +def read_process(cmd, args=""): + pipein, pipeout = os.popen4("%s %s" % (cmd, args)) + try: + firstline = pipeout.readline() + if (re.search(r"(not recognized|No such file|not found)", firstline, + re.IGNORECASE)): + raise IOError('%s must be on your system path.' % cmd) + output = firstline + pipeout.read() + finally: + pipeout.close() + return output + + +if sys.platform == 'win32': + APACHE_PATH = "httpd" +else: + APACHE_PATH = "apache" + +CONF_PATH = "test_mw.conf" + +conf_modwsgi = r""" +# Apache2 server conf file for testing CherryPy with modpython_gateway. 
+ +ServerName 127.0.0.1 +DocumentRoot "/" +Listen %(port)s + +AllowEncodedSlashes On +LoadModule rewrite_module modules/mod_rewrite.so +RewriteEngine on +RewriteMap escaping int:escape + +LoadModule log_config_module modules/mod_log_config.so +LogFormat "%%h %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-agent}i\"" combined +CustomLog "%(curdir)s/apache.access.log" combined +ErrorLog "%(curdir)s/apache.error.log" +LogLevel debug + +LoadModule wsgi_module modules/mod_wsgi.so +LoadModule env_module modules/mod_env.so + +WSGIScriptAlias / "%(curdir)s/modwsgi.py" +SetEnv testmod %(testmod)s +""" + + +class ModWSGISupervisor(helper.Supervisor): + + """Server Controller for ModWSGI and CherryPy.""" + + using_apache = True + using_wsgi = True + template = conf_modwsgi + + def __str__(self): + return "ModWSGI Server on %s:%s" % (self.host, self.port) + + def start(self, modulename): + mpconf = CONF_PATH + if not os.path.isabs(mpconf): + mpconf = os.path.join(curdir, mpconf) + + f = open(mpconf, 'wb') + try: + output = (self.template % + {'port': self.port, 'testmod': modulename, + 'curdir': curdir}) + f.write(output) + finally: + f.close() + + result = read_process(APACHE_PATH, "-k start -f %s" % mpconf) + if result: + print(result) + + # Make a request so mod_wsgi starts up our app. + # If we don't, concurrent initial requests will 404. + cherrypy._cpserver.wait_for_occupied_port("127.0.0.1", self.port) + webtest.openURL('/ihopetheresnodefault', port=self.port) + time.sleep(1) + + def stop(self): + """Gracefully shutdown a server that is serving forever.""" + read_process(APACHE_PATH, "-k stop") + + +loaded = False + + +def application(environ, start_response): + import cherrypy + global loaded + if not loaded: + loaded = True + modname = "cherrypy.test." 
+ environ['testmod'] + mod = __import__(modname, globals(), locals(), ['']) + mod.setup_server() + + cherrypy.config.update({ + "log.error_file": os.path.join(curdir, "test.error.log"), + "log.access_file": os.path.join(curdir, "test.access.log"), + "environment": "test_suite", + "engine.SIGHUP": None, + "engine.SIGTERM": None, + }) + return cherrypy.tree(environ, start_response) diff --git a/deps/cherrypy/test/sessiondemo.py b/deps/cherrypy/test/sessiondemo.py new file mode 100644 index 00000000..4b3d1222 --- /dev/null +++ b/deps/cherrypy/test/sessiondemo.py @@ -0,0 +1,158 @@ +#!/usr/bin/python +"""A session demonstration app.""" + +import calendar +from datetime import datetime +import sys +import cherrypy +from cherrypy.lib import sessions +from cherrypy._cpcompat import copyitems + + +page = """ + + + + + + + +

Session Demo

+

Reload this page. The session ID should not change from one reload to the next

+

Index | Expire | Regenerate

+ + + + + + + + + +
Session ID:%(sessionid)s

%(changemsg)s

Request Cookie%(reqcookie)s
Response Cookie%(respcookie)s

Session Data%(sessiondata)s
Server Time%(servertime)s (Unix time: %(serverunixtime)s)
Browser Time 
Cherrypy Version:%(cpversion)s
Python Version:%(pyversion)s
+ +""" + + +class Root(object): + + def page(self): + changemsg = [] + if cherrypy.session.id != cherrypy.session.originalid: + if cherrypy.session.originalid is None: + changemsg.append( + 'Created new session because no session id was given.') + if cherrypy.session.missing: + changemsg.append( + 'Created new session due to missing ' + '(expired or malicious) session.') + if cherrypy.session.regenerated: + changemsg.append('Application generated a new session.') + + try: + expires = cherrypy.response.cookie['session_id']['expires'] + except KeyError: + expires = '' + + return page % { + 'sessionid': cherrypy.session.id, + 'changemsg': '
'.join(changemsg), + 'respcookie': cherrypy.response.cookie.output(), + 'reqcookie': cherrypy.request.cookie.output(), + 'sessiondata': copyitems(cherrypy.session), + 'servertime': ( + datetime.utcnow().strftime("%Y/%m/%d %H:%M") + " UTC" + ), + 'serverunixtime': calendar.timegm(datetime.utcnow().timetuple()), + 'cpversion': cherrypy.__version__, + 'pyversion': sys.version, + 'expires': expires, + } + + @cherrypy.expose + def index(self): + # Must modify data or the session will not be saved. + cherrypy.session['color'] = 'green' + return self.page() + + @cherrypy.expose + def expire(self): + sessions.expire() + return self.page() + + @cherrypy.expose + def regen(self): + cherrypy.session.regenerate() + # Must modify data or the session will not be saved. + cherrypy.session['color'] = 'yellow' + return self.page() + +if __name__ == '__main__': + cherrypy.config.update({ + #'environment': 'production', + 'log.screen': True, + 'tools.sessions.on': True, + }) + cherrypy.quickstart(Root()) diff --git a/deps/cherrypy/test/test_auth_basic.py b/deps/cherrypy/test/test_auth_basic.py new file mode 100644 index 00000000..b70f2902 --- /dev/null +++ b/deps/cherrypy/test/test_auth_basic.py @@ -0,0 +1,95 @@ +# This file is part of CherryPy +# -*- coding: utf-8 -*- +# vim:ts=4:sw=4:expandtab:fileencoding=utf-8 + +from hashlib import md5 + +import cherrypy +from cherrypy._cpcompat import ntob +from cherrypy.lib import auth_basic +from cherrypy.test import helper + + +class BasicAuthTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def index(self): + return "This is public." + + class BasicProtected: + + @cherrypy.expose + def index(self): + return "Hello %s, you've been authorized." % ( + cherrypy.request.login) + + class BasicProtected2: + + @cherrypy.expose + def index(self): + return "Hello %s, you've been authorized." 
% ( + cherrypy.request.login) + + userpassdict = {'xuser': 'xpassword'} + userhashdict = {'xuser': md5(ntob('xpassword')).hexdigest()} + + def checkpasshash(realm, user, password): + p = userhashdict.get(user) + return p and p == md5(ntob(password)).hexdigest() or False + + basic_checkpassword_dict = auth_basic.checkpassword_dict(userpassdict) + conf = { + '/basic': { + 'tools.auth_basic.on': True, + 'tools.auth_basic.realm': 'wonderland', + 'tools.auth_basic.checkpassword': basic_checkpassword_dict + }, + '/basic2': { + 'tools.auth_basic.on': True, + 'tools.auth_basic.realm': 'wonderland', + 'tools.auth_basic.checkpassword': checkpasshash + }, + } + + root = Root() + root.basic = BasicProtected() + root.basic2 = BasicProtected2() + cherrypy.tree.mount(root, config=conf) + + def testPublic(self): + self.getPage("/") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html;charset=utf-8') + self.assertBody('This is public.') + + def testBasic(self): + self.getPage("/basic/") + self.assertStatus(401) + self.assertHeader('WWW-Authenticate', 'Basic realm="wonderland"') + + self.getPage('/basic/', + [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3JX')]) + self.assertStatus(401) + + self.getPage('/basic/', + [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3Jk')]) + self.assertStatus('200 OK') + self.assertBody("Hello xuser, you've been authorized.") + + def testBasic2(self): + self.getPage("/basic2/") + self.assertStatus(401) + self.assertHeader('WWW-Authenticate', 'Basic realm="wonderland"') + + self.getPage('/basic2/', + [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3JX')]) + self.assertStatus(401) + + self.getPage('/basic2/', + [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3Jk')]) + self.assertStatus('200 OK') + self.assertBody("Hello xuser, you've been authorized.") diff --git a/deps/cherrypy/test/test_auth_digest.py b/deps/cherrypy/test/test_auth_digest.py new file mode 100644 index 00000000..bcb9431f --- /dev/null +++ b/deps/cherrypy/test/test_auth_digest.py 
@@ -0,0 +1,139 @@ +# This file is part of CherryPy +# -*- coding: utf-8 -*- +# vim:ts=4:sw=4:expandtab:fileencoding=utf-8 + + +import cherrypy +from cherrypy.lib import auth_digest + +from cherrypy.test import helper + + +class DigestAuthTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def index(self): + return "This is public." + + class DigestProtected: + + @cherrypy.expose + def index(self): + return "Hello %s, you've been authorized." % ( + cherrypy.request.login) + + def fetch_users(): + return {'test': 'test'} + + get_ha1 = cherrypy.lib.auth_digest.get_ha1_dict_plain(fetch_users()) + conf = {'/digest': {'tools.auth_digest.on': True, + 'tools.auth_digest.realm': 'localhost', + 'tools.auth_digest.get_ha1': get_ha1, + 'tools.auth_digest.key': 'a565c27146791cfb', + 'tools.auth_digest.debug': 'True'}} + + root = Root() + root.digest = DigestProtected() + cherrypy.tree.mount(root, config=conf) + + def testPublic(self): + self.getPage("/") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html;charset=utf-8') + self.assertBody('This is public.') + + def testDigest(self): + self.getPage("/digest/") + self.assertStatus(401) + + value = None + for k, v in self.headers: + if k.lower() == "www-authenticate": + if v.startswith("Digest"): + value = v + break + + if value is None: + self._handlewebError( + "Digest authentification scheme was not found") + + value = value[7:] + items = value.split(', ') + tokens = {} + for item in items: + key, value = item.split('=') + tokens[key.lower()] = value + + missing_msg = "%s is missing" + bad_value_msg = "'%s' was expecting '%s' but found '%s'" + nonce = None + if 'realm' not in tokens: + self._handlewebError(missing_msg % 'realm') + elif tokens['realm'] != '"localhost"': + self._handlewebError(bad_value_msg % + ('realm', '"localhost"', tokens['realm'])) + if 'nonce' not in tokens: + self._handlewebError(missing_msg % 'nonce') + else: + nonce = 
tokens['nonce'].strip('"') + if 'algorithm' not in tokens: + self._handlewebError(missing_msg % 'algorithm') + elif tokens['algorithm'] != '"MD5"': + self._handlewebError(bad_value_msg % + ('algorithm', '"MD5"', tokens['algorithm'])) + if 'qop' not in tokens: + self._handlewebError(missing_msg % 'qop') + elif tokens['qop'] != '"auth"': + self._handlewebError(bad_value_msg % + ('qop', '"auth"', tokens['qop'])) + + get_ha1 = auth_digest.get_ha1_dict_plain({'test': 'test'}) + + # Test user agent response with a wrong value for 'realm' + base_auth = ('Digest username="test", ' + 'realm="wrong realm", ' + 'nonce="%s", ' + 'uri="/digest/", ' + 'algorithm=MD5, ' + 'response="%s", ' + 'qop=auth, ' + 'nc=%s, ' + 'cnonce="1522e61005789929"') + + auth_header = base_auth % ( + nonce, '11111111111111111111111111111111', '00000001') + auth = auth_digest.HttpDigestAuthorization(auth_header, 'GET') + # calculate the response digest + ha1 = get_ha1(auth.realm, 'test') + response = auth.request_digest(ha1) + # send response with correct response digest, but wrong realm + auth_header = base_auth % (nonce, response, '00000001') + self.getPage('/digest/', [('Authorization', auth_header)]) + self.assertStatus(401) + + # Test that must pass + base_auth = ('Digest username="test", ' + 'realm="localhost", ' + 'nonce="%s", ' + 'uri="/digest/", ' + 'algorithm=MD5, ' + 'response="%s", ' + 'qop=auth, ' + 'nc=%s, ' + 'cnonce="1522e61005789929"') + + auth_header = base_auth % ( + nonce, '11111111111111111111111111111111', '00000001') + auth = auth_digest.HttpDigestAuthorization(auth_header, 'GET') + # calculate the response digest + ha1 = get_ha1('localhost', 'test') + response = auth.request_digest(ha1) + # send response with correct response digest + auth_header = base_auth % (nonce, response, '00000001') + self.getPage('/digest/', [('Authorization', auth_header)]) + self.assertStatus('200 OK') + self.assertBody("Hello test, you've been authorized.") diff --git a/deps/cherrypy/test/test_bus.py 
b/deps/cherrypy/test/test_bus.py new file mode 100644 index 00000000..6c927ffb --- /dev/null +++ b/deps/cherrypy/test/test_bus.py @@ -0,0 +1,275 @@ +import threading +import time +import unittest + +from cherrypy._cpcompat import get_daemon +from cherrypy.process import wspbus + + +msg = "Listener %d on channel %s: %s." + + +class PublishSubscribeTests(unittest.TestCase): + + def get_listener(self, channel, index): + def listener(arg=None): + self.responses.append(msg % (index, channel, arg)) + return listener + + def test_builtin_channels(self): + b = wspbus.Bus() + + self.responses, expected = [], [] + + for channel in b.listeners: + for index, priority in enumerate([100, 50, 0, 51]): + b.subscribe(channel, + self.get_listener(channel, index), priority) + + for channel in b.listeners: + b.publish(channel) + expected.extend([msg % (i, channel, None) for i in (2, 1, 3, 0)]) + b.publish(channel, arg=79347) + expected.extend([msg % (i, channel, 79347) for i in (2, 1, 3, 0)]) + + self.assertEqual(self.responses, expected) + + def test_custom_channels(self): + b = wspbus.Bus() + + self.responses, expected = [], [] + + custom_listeners = ('hugh', 'louis', 'dewey') + for channel in custom_listeners: + for index, priority in enumerate([None, 10, 60, 40]): + b.subscribe(channel, + self.get_listener(channel, index), priority) + + for channel in custom_listeners: + b.publish(channel, 'ah so') + expected.extend([msg % (i, channel, 'ah so') + for i in (1, 3, 0, 2)]) + b.publish(channel) + expected.extend([msg % (i, channel, None) for i in (1, 3, 0, 2)]) + + self.assertEqual(self.responses, expected) + + def test_listener_errors(self): + b = wspbus.Bus() + + self.responses, expected = [], [] + channels = [c for c in b.listeners if c != 'log'] + + for channel in channels: + b.subscribe(channel, self.get_listener(channel, 1)) + # This will break since the lambda takes no args. 
+ b.subscribe(channel, lambda: None, priority=20) + + for channel in channels: + self.assertRaises(wspbus.ChannelFailures, b.publish, channel, 123) + expected.append(msg % (1, channel, 123)) + + self.assertEqual(self.responses, expected) + + +class BusMethodTests(unittest.TestCase): + + def log(self, bus): + self._log_entries = [] + + def logit(msg, level): + self._log_entries.append(msg) + bus.subscribe('log', logit) + + def assertLog(self, entries): + self.assertEqual(self._log_entries, entries) + + def get_listener(self, channel, index): + def listener(arg=None): + self.responses.append(msg % (index, channel, arg)) + return listener + + def test_start(self): + b = wspbus.Bus() + self.log(b) + + self.responses = [] + num = 3 + for index in range(num): + b.subscribe('start', self.get_listener('start', index)) + + b.start() + try: + # The start method MUST call all 'start' listeners. + self.assertEqual( + set(self.responses), + set([msg % (i, 'start', None) for i in range(num)])) + # The start method MUST move the state to STARTED + # (or EXITING, if errors occur) + self.assertEqual(b.state, b.states.STARTED) + # The start method MUST log its states. + self.assertLog(['Bus STARTING', 'Bus STARTED']) + finally: + # Exit so the atexit handler doesn't complain. + b.exit() + + def test_stop(self): + b = wspbus.Bus() + self.log(b) + + self.responses = [] + num = 3 + for index in range(num): + b.subscribe('stop', self.get_listener('stop', index)) + + b.stop() + + # The stop method MUST call all 'stop' listeners. + self.assertEqual(set(self.responses), + set([msg % (i, 'stop', None) for i in range(num)])) + # The stop method MUST move the state to STOPPED + self.assertEqual(b.state, b.states.STOPPED) + # The stop method MUST log its states. 
+ self.assertLog(['Bus STOPPING', 'Bus STOPPED']) + + def test_graceful(self): + b = wspbus.Bus() + self.log(b) + + self.responses = [] + num = 3 + for index in range(num): + b.subscribe('graceful', self.get_listener('graceful', index)) + + b.graceful() + + # The graceful method MUST call all 'graceful' listeners. + self.assertEqual( + set(self.responses), + set([msg % (i, 'graceful', None) for i in range(num)])) + # The graceful method MUST log its states. + self.assertLog(['Bus graceful']) + + def test_exit(self): + b = wspbus.Bus() + self.log(b) + + self.responses = [] + num = 3 + for index in range(num): + b.subscribe('stop', self.get_listener('stop', index)) + b.subscribe('exit', self.get_listener('exit', index)) + + b.exit() + + # The exit method MUST call all 'stop' listeners, + # and then all 'exit' listeners. + self.assertEqual(set(self.responses), + set([msg % (i, 'stop', None) for i in range(num)] + + [msg % (i, 'exit', None) for i in range(num)])) + # The exit method MUST move the state to EXITING + self.assertEqual(b.state, b.states.EXITING) + # The exit method MUST log its states. + self.assertLog( + ['Bus STOPPING', 'Bus STOPPED', 'Bus EXITING', 'Bus EXITED']) + + def test_wait(self): + b = wspbus.Bus() + + def f(method): + time.sleep(0.2) + getattr(b, method)() + + for method, states in [('start', [b.states.STARTED]), + ('stop', [b.states.STOPPED]), + ('start', + [b.states.STARTING, b.states.STARTED]), + ('exit', [b.states.EXITING]), + ]: + threading.Thread(target=f, args=(method,)).start() + b.wait(states) + + # The wait method MUST wait for the given state(s). 
+ if b.state not in states: + self.fail("State %r not in %r" % (b.state, states)) + + def test_block(self): + b = wspbus.Bus() + self.log(b) + + def f(): + time.sleep(0.2) + b.exit() + + def g(): + time.sleep(0.4) + threading.Thread(target=f).start() + threading.Thread(target=g).start() + threads = [t for t in threading.enumerate() if not get_daemon(t)] + self.assertEqual(len(threads), 3) + + b.block() + + # The block method MUST wait for the EXITING state. + self.assertEqual(b.state, b.states.EXITING) + # The block method MUST wait for ALL non-main, non-daemon threads to + # finish. + threads = [t for t in threading.enumerate() if not get_daemon(t)] + self.assertEqual(len(threads), 1) + # The last message will mention an indeterminable thread name; ignore + # it + self.assertEqual(self._log_entries[:-1], + ['Bus STOPPING', 'Bus STOPPED', + 'Bus EXITING', 'Bus EXITED', + 'Waiting for child threads to terminate...']) + + def test_start_with_callback(self): + b = wspbus.Bus() + self.log(b) + try: + events = [] + + def f(*args, **kwargs): + events.append(("f", args, kwargs)) + + def g(): + events.append("g") + b.subscribe("start", g) + b.start_with_callback(f, (1, 3, 5), {"foo": "bar"}) + # Give wait() time to run f() + time.sleep(0.2) + + # The callback method MUST wait for the STARTED state. + self.assertEqual(b.state, b.states.STARTED) + # The callback method MUST run after all start methods. + self.assertEqual(events, ["g", ("f", (1, 3, 5), {"foo": "bar"})]) + finally: + b.exit() + + def test_log(self): + b = wspbus.Bus() + self.log(b) + self.assertLog([]) + + # Try a normal message. 
+ expected = [] + for msg in ["O mah darlin'"] * 3 + ["Clementiiiiiiiine"]: + b.log(msg) + expected.append(msg) + self.assertLog(expected) + + # Try an error message + try: + foo + except NameError: + b.log("You are lost and gone forever", traceback=True) + lastmsg = self._log_entries[-1] + if "Traceback" not in lastmsg or "NameError" not in lastmsg: + self.fail("Last log message %r did not contain " + "the expected traceback." % lastmsg) + else: + self.fail("NameError was not raised as expected.") + + +if __name__ == "__main__": + unittest.main() diff --git a/deps/cherrypy/test/test_caching.py b/deps/cherrypy/test/test_caching.py new file mode 100644 index 00000000..bac74351 --- /dev/null +++ b/deps/cherrypy/test/test_caching.py @@ -0,0 +1,338 @@ +import datetime +from itertools import count +import os +curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) +import threading +import time + +import cherrypy +from cherrypy._cpcompat import next, ntob, quote, xrange +from cherrypy.lib import httputil + +gif_bytes = ntob( + 'GIF89a\x01\x00\x01\x00\x82\x00\x01\x99"\x1e\x00\x00\x00\x00\x00' + '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + '\x00,\x00\x00\x00\x00\x01\x00\x01\x00\x02\x03\x02\x08\t\x00;' +) + + +from cherrypy.test import helper + + +class CacheTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + + @cherrypy.config(**{'tools.caching.on': True}) + class Root: + + def __init__(self): + self.counter = 0 + self.control_counter = 0 + self.longlock = threading.Lock() + + @cherrypy.expose + def index(self): + self.counter += 1 + msg = "visit #%s" % self.counter + return msg + + @cherrypy.expose + def control(self): + self.control_counter += 1 + return "visit #%s" % self.control_counter + + @cherrypy.expose + def a_gif(self): + cherrypy.response.headers[ + 'Last-Modified'] = httputil.HTTPDate() + return gif_bytes + + @cherrypy.expose + def long_process(self, seconds='1'): + try: + self.longlock.acquire() + 
time.sleep(float(seconds)) + finally: + self.longlock.release() + return 'success!' + + @cherrypy.expose + def clear_cache(self, path): + cherrypy._cache.store[cherrypy.request.base + path].clear() + + @cherrypy.config(**{ + 'tools.caching.on': True, + 'tools.response_headers.on': True, + 'tools.response_headers.headers': [ + ('Vary', 'Our-Varying-Header') + ], + }) + class VaryHeaderCachingServer(object): + + def __init__(self): + self.counter = count(1) + + @cherrypy.expose + def index(self): + return "visit #%s" % next(self.counter) + + @cherrypy.config(**{ + 'tools.expires.on': True, + 'tools.expires.secs': 60, + 'tools.staticdir.on': True, + 'tools.staticdir.dir': 'static', + 'tools.staticdir.root': curdir, + }) + class UnCached(object): + + @cherrypy.expose + @cherrypy.config(**{'tools.expires.secs': 0}) + def force(self): + cherrypy.response.headers['Etag'] = 'bibbitybobbityboo' + self._cp_config['tools.expires.force'] = True + self._cp_config['tools.expires.secs'] = 0 + return "being forceful" + + @cherrypy.expose + def dynamic(self): + cherrypy.response.headers['Etag'] = 'bibbitybobbityboo' + cherrypy.response.headers['Cache-Control'] = 'private' + return "D-d-d-dynamic!" + + @cherrypy.expose + def cacheable(self): + cherrypy.response.headers['Etag'] = 'bibbitybobbityboo' + return "Hi, I'm cacheable." 
+ + @cherrypy.expose + @cherrypy.config(**{'tools.expires.secs': 86400}) + def specific(self): + cherrypy.response.headers[ + 'Etag'] = 'need_this_to_make_me_cacheable' + return "I am being specific" + + class Foo(object): + pass + + @cherrypy.expose + @cherrypy.config(**{'tools.expires.secs': Foo()}) + def wrongtype(self): + cherrypy.response.headers[ + 'Etag'] = 'need_this_to_make_me_cacheable' + return "Woops" + + cherrypy.tree.mount(Root()) + cherrypy.tree.mount(UnCached(), "/expires") + cherrypy.tree.mount(VaryHeaderCachingServer(), "/varying_headers") + cherrypy.config.update({'tools.gzip.on': True}) + + def testCaching(self): + elapsed = 0.0 + for trial in range(10): + self.getPage("/") + # The response should be the same every time, + # except for the Age response header. + self.assertBody('visit #1') + if trial != 0: + age = int(self.assertHeader("Age")) + self.assert_(age >= elapsed) + elapsed = age + + # POST, PUT, DELETE should not be cached. + self.getPage("/", method="POST") + self.assertBody('visit #2') + # Because gzip is turned on, the Vary header should always Vary for + # content-encoding + self.assertHeader('Vary', 'Accept-Encoding') + # The previous request should have invalidated the cache, + # so this request will recalc the response. + self.getPage("/", method="GET") + self.assertBody('visit #3') + # ...but this request should get the cached copy. + self.getPage("/", method="GET") + self.assertBody('visit #3') + self.getPage("/", method="DELETE") + self.assertBody('visit #4') + + # The previous request should have invalidated the cache, + # so this request will recalc the response. 
+ self.getPage("/", method="GET", headers=[('Accept-Encoding', 'gzip')]) + self.assertHeader('Content-Encoding', 'gzip') + self.assertHeader('Vary') + self.assertEqual( + cherrypy.lib.encoding.decompress(self.body), ntob("visit #5")) + + # Now check that a second request gets the gzip header and gzipped body + # This also tests a bug in 3.0 to 3.0.2 whereby the cached, gzipped + # response body was being gzipped a second time. + self.getPage("/", method="GET", headers=[('Accept-Encoding', 'gzip')]) + self.assertHeader('Content-Encoding', 'gzip') + self.assertEqual( + cherrypy.lib.encoding.decompress(self.body), ntob("visit #5")) + + # Now check that a third request that doesn't accept gzip + # skips the cache (because the 'Vary' header denies it). + self.getPage("/", method="GET") + self.assertNoHeader('Content-Encoding') + self.assertBody('visit #6') + + def testVaryHeader(self): + self.getPage("/varying_headers/") + self.assertStatus("200 OK") + self.assertHeaderItemValue('Vary', 'Our-Varying-Header') + self.assertBody('visit #1') + + # Now check that different 'Vary'-fields don't evict each other. + # This test creates 2 requests with different 'Our-Varying-Header' + # and then tests if the first one still exists. 
+ self.getPage("/varying_headers/", + headers=[('Our-Varying-Header', 'request 2')]) + self.assertStatus("200 OK") + self.assertBody('visit #2') + + self.getPage("/varying_headers/", + headers=[('Our-Varying-Header', 'request 2')]) + self.assertStatus("200 OK") + self.assertBody('visit #2') + + self.getPage("/varying_headers/") + self.assertStatus("200 OK") + self.assertBody('visit #1') + + def testExpiresTool(self): + # test setting an expires header + self.getPage("/expires/specific") + self.assertStatus("200 OK") + self.assertHeader("Expires") + + # test exceptions for bad time values + self.getPage("/expires/wrongtype") + self.assertStatus(500) + self.assertInBody("TypeError") + + # static content should not have "cache prevention" headers + self.getPage("/expires/index.html") + self.assertStatus("200 OK") + self.assertNoHeader("Pragma") + self.assertNoHeader("Cache-Control") + self.assertHeader("Expires") + + # dynamic content that sets indicators should not have + # "cache prevention" headers + self.getPage("/expires/cacheable") + self.assertStatus("200 OK") + self.assertNoHeader("Pragma") + self.assertNoHeader("Cache-Control") + self.assertHeader("Expires") + + self.getPage('/expires/dynamic') + self.assertBody("D-d-d-dynamic!") + # the Cache-Control header should be untouched + self.assertHeader("Cache-Control", "private") + self.assertHeader("Expires") + + # configure the tool to ignore indicators and replace existing headers + self.getPage("/expires/force") + self.assertStatus("200 OK") + # This also gives us a chance to test 0 expiry with no other headers + self.assertHeader("Pragma", "no-cache") + if cherrypy.server.protocol_version == "HTTP/1.1": + self.assertHeader("Cache-Control", "no-cache, must-revalidate") + self.assertHeader("Expires", "Sun, 28 Jan 2007 00:00:00 GMT") + + # static content should now have "cache prevention" headers + self.getPage("/expires/index.html") + self.assertStatus("200 OK") + self.assertHeader("Pragma", "no-cache") + if 
cherrypy.server.protocol_version == "HTTP/1.1": + self.assertHeader("Cache-Control", "no-cache, must-revalidate") + self.assertHeader("Expires", "Sun, 28 Jan 2007 00:00:00 GMT") + + # the cacheable handler should now have "cache prevention" headers + self.getPage("/expires/cacheable") + self.assertStatus("200 OK") + self.assertHeader("Pragma", "no-cache") + if cherrypy.server.protocol_version == "HTTP/1.1": + self.assertHeader("Cache-Control", "no-cache, must-revalidate") + self.assertHeader("Expires", "Sun, 28 Jan 2007 00:00:00 GMT") + + self.getPage('/expires/dynamic') + self.assertBody("D-d-d-dynamic!") + # dynamic sets Cache-Control to private but it should be + # overwritten here ... + self.assertHeader("Pragma", "no-cache") + if cherrypy.server.protocol_version == "HTTP/1.1": + self.assertHeader("Cache-Control", "no-cache, must-revalidate") + self.assertHeader("Expires", "Sun, 28 Jan 2007 00:00:00 GMT") + + def testLastModified(self): + self.getPage("/a.gif") + self.assertStatus(200) + self.assertBody(gif_bytes) + lm1 = self.assertHeader("Last-Modified") + + # this request should get the cached copy. + self.getPage("/a.gif") + self.assertStatus(200) + self.assertBody(gif_bytes) + self.assertHeader("Age") + lm2 = self.assertHeader("Last-Modified") + self.assertEqual(lm1, lm2) + + # this request should match the cached copy, but raise 304. + self.getPage("/a.gif", [('If-Modified-Since', lm1)]) + self.assertStatus(304) + self.assertNoHeader("Last-Modified") + if not getattr(cherrypy.server, "using_apache", False): + self.assertHeader("Age") + + def test_antistampede(self): + SECONDS = 4 + slow_url = "/long_process?seconds={SECONDS}".format(**locals()) + # We MUST make an initial synchronous request in order to create the + # AntiStampedeCache object, and populate its selecting_headers, + # before the actual stampede. 
+ self.getPage(slow_url) + self.assertBody('success!') + self.getPage("/clear_cache?path=" + quote(slow_url, safe='')) + self.assertStatus(200) + + start = datetime.datetime.now() + + def run(): + self.getPage(slow_url) + # The response should be the same every time + self.assertBody('success!') + ts = [threading.Thread(target=run) for i in xrange(100)] + for t in ts: + t.start() + for t in ts: + t.join() + finish = datetime.datetime.now() + # Allow for overhead, two seconds for slow hosts + allowance = SECONDS + 2 + self.assertEqualDates(start, finish, seconds=allowance) + + def test_cache_control(self): + self.getPage("/control") + self.assertBody('visit #1') + self.getPage("/control") + self.assertBody('visit #1') + + self.getPage("/control", headers=[('Cache-Control', 'no-cache')]) + self.assertBody('visit #2') + self.getPage("/control") + self.assertBody('visit #2') + + self.getPage("/control", headers=[('Pragma', 'no-cache')]) + self.assertBody('visit #3') + self.getPage("/control") + self.assertBody('visit #3') + + time.sleep(1) + self.getPage("/control", headers=[('Cache-Control', 'max-age=0')]) + self.assertBody('visit #4') + self.getPage("/control") + self.assertBody('visit #4') diff --git a/deps/cherrypy/test/test_compat.py b/deps/cherrypy/test/test_compat.py new file mode 100644 index 00000000..c31316b1 --- /dev/null +++ b/deps/cherrypy/test/test_compat.py @@ -0,0 +1,23 @@ +from __future__ import unicode_literals + +import unittest + +import six + +import nose + +from cherrypy import _cpcompat as compat + + +class StringTester(unittest.TestCase): + + def test_ntob_non_native(self): + """ + ntob should raise an Exception on unicode. + (Python 2 only) + + See #1132 for discussion. 
+ """ + if six.PY3: + raise nose.SkipTest("Only useful on Python 2") + self.assertRaises(Exception, compat.ntob, 'fight') diff --git a/deps/cherrypy/test/test_config.py b/deps/cherrypy/test/test_config.py new file mode 100644 index 00000000..088f95d4 --- /dev/null +++ b/deps/cherrypy/test/test_config.py @@ -0,0 +1,303 @@ +"""Tests for the CherryPy configuration system.""" + +import io +import os +import sys +import unittest + +import six + +import cherrypy +import cherrypy._cpcompat as compat + +localDir = os.path.join(os.getcwd(), os.path.dirname(__file__)) + + +StringIOFromNative = lambda x: io.StringIO(six.text_type(x)) + + +def setup_server(): + + @cherrypy.config(foo='this', bar='that') + class Root: + + def __init__(self): + cherrypy.config.namespaces['db'] = self.db_namespace + + def db_namespace(self, k, v): + if k == "scheme": + self.db = v + + @cherrypy.expose(alias=('global_', 'xyz')) + def index(self, key): + return cherrypy.request.config.get(key, "None") + + @cherrypy.expose + def repr(self, key): + return repr(cherrypy.request.config.get(key, None)) + + @cherrypy.expose + def dbscheme(self): + return self.db + + @cherrypy.expose + @cherrypy.config(**{'request.body.attempt_charsets': ['utf-16']}) + def plain(self, x): + return x + + favicon_ico = cherrypy.tools.staticfile.handler( + filename=os.path.join(localDir, '../favicon.ico')) + + @cherrypy.config(foo='this2', baz='that2') + class Foo: + + @cherrypy.expose + def index(self, key): + return cherrypy.request.config.get(key, "None") + nex = index + + @cherrypy.expose + @cherrypy.config(**{'response.headers.X-silly': 'sillyval'}) + def silly(self): + return 'Hello world' + + # Test the expose and config decorators + @cherrypy.config(foo='this3', **{'bax': 'this4'}) + @cherrypy.expose + def bar(self, key): + return repr(cherrypy.request.config.get(key, None)) + + class Another: + + @cherrypy.expose + def index(self, key): + return str(cherrypy.request.config.get(key, "None")) + + def 
raw_namespace(key, value): + if key == 'input.map': + handler = cherrypy.request.handler + + def wrapper(): + params = cherrypy.request.params + for name, coercer in list(value.items()): + try: + params[name] = coercer(params[name]) + except KeyError: + pass + return handler() + cherrypy.request.handler = wrapper + elif key == 'output': + handler = cherrypy.request.handler + + def wrapper(): + # 'value' is a type (like int or str). + return value(handler()) + cherrypy.request.handler = wrapper + + @cherrypy.config(**{'raw.output': repr}) + class Raw: + + @cherrypy.expose + @cherrypy.config(**{'raw.input.map': {'num': int}}) + def incr(self, num): + return num + 1 + + if not six.PY3: + thing3 = "thing3: unicode('test', errors='ignore')" + else: + thing3 = '' + + ioconf = StringIOFromNative(""" +[/] +neg: -1234 +filename: os.path.join(sys.prefix, "hello.py") +thing1: cherrypy.lib.httputil.response_codes[404] +thing2: __import__('cherrypy.tutorial', globals(), locals(), ['']).thing2 +%s +complex: 3+2j +mul: 6*3 +ones: "11" +twos: "22" +stradd: %%(ones)s + %%(twos)s + "33" + +[/favicon.ico] +tools.staticfile.filename = %r +""" % (thing3, os.path.join(localDir, 'static/dirback.jpg'))) + + root = Root() + root.foo = Foo() + root.raw = Raw() + app = cherrypy.tree.mount(root, config=ioconf) + app.request_class.namespaces['raw'] = raw_namespace + + cherrypy.tree.mount(Another(), "/another") + cherrypy.config.update({'luxuryyacht': 'throatwobblermangrove', + 'db.scheme': r"sqlite///memory", + }) + + +# Client-side code # + +from cherrypy.test import helper + + +class ConfigTests(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def testConfig(self): + tests = [ + ('/', 'nex', 'None'), + ('/', 'foo', 'this'), + ('/', 'bar', 'that'), + ('/xyz', 'foo', 'this'), + ('/foo/', 'foo', 'this2'), + ('/foo/', 'bar', 'that'), + ('/foo/', 'bax', 'None'), + ('/foo/bar', 'baz', "'that2'"), + ('/foo/nex', 'baz', 'that2'), + # If 'foo' == 'this', then the mount point 
    def testUnrepr(self):
        """Config values written as unrepr expressions must be evaluated
        server-side; each /repr request echoes repr() of the stored value.
        The keys queried here come from the ioconf file built in
        setup_server (neg, filename, thing1..thing3, complex, mul, stradd).
        """
        self.getPage("/repr?key=neg")
        self.assertBody("-1234")

        self.getPage("/repr?key=filename")
        self.assertBody(repr(os.path.join(sys.prefix, "hello.py")))

        self.getPage("/repr?key=thing1")
        self.assertBody(repr(cherrypy.lib.httputil.response_codes[404]))

        if not getattr(cherrypy.server, "using_apache", False):
            # The object IDs won't match up when using Apache, since the
            # server and client are running in different processes.
            self.getPage("/repr?key=thing2")
            from cherrypy.tutorial import thing2
            self.assertBody(repr(thing2))

        if not six.PY3:
            # thing3 is only placed in the config file on Python 2.
            self.getPage("/repr?key=thing3")
            self.assertBody(repr(unicode('test')))

        self.getPage("/repr?key=complex")
        self.assertBody("(3+2j)")

        self.getPage("/repr?key=mul")
        self.assertBody("18")

        # stradd concatenates the interpolated "11" and "22" with "33".
        self.getPage("/repr?key=stradd")
        self.assertBody(repr("112233"))
    def test_call_with_literal_dict(self):
        """A config value may be a call expression with a literal
        dict argument, evaluated when the config is parsed.
        """
        from textwrap import dedent
        conf = dedent("""
            [my]
            value = dict(**{'foo': 'bar'})
            """)
        fp = StringIOFromNative(conf)
        cherrypy.config.update(fp)
        self.assertEqual(cherrypy.config['my']['value'], {'foo': 'bar'})

    def test_call_with_kwargs(self):
        """A config value may call with **kwargs pulled from an object
        reachable at parse time (here, an attribute stuck onto the
        cherrypy module itself).
        """
        from textwrap import dedent
        conf = dedent("""
            [my]
            value = dict(foo="buzz", **cherrypy._test_dict)
            """)
        test_dict = {
            "foo": "bar",
            "bar": "foo",
            "fizz": "buzz"
        }
        cherrypy._test_dict = test_dict
        fp = StringIOFromNative(conf)
        cherrypy.config.update(fp)
        # The stored value was computed with foo="buzz" overriding the
        # dict; mutate the local copy the same way so the two compare equal.
        test_dict['foo'] = 'buzz'
        self.assertEqual(cherrypy.config['my']['value']['foo'], 'buzz')
        self.assertEqual(cherrypy.config['my']['value'], test_dict)
        del cherrypy._test_dict
Test non-numeric + # Also test default server.instance = builtin server + 'server.yetanother.socket_port': 9878, + }) + + PORT = 9876 + + def testBasicConfig(self): + self.getPage("/") + self.assertBody(str(self.PORT)) + + def testAdditionalServers(self): + if self.scheme == 'https': + return self.skip("not available under ssl") + self.PORT = 9877 + self.getPage("/") + self.assertBody(str(self.PORT)) + self.PORT = 9878 + self.getPage("/") + self.assertBody(str(self.PORT)) + + def testMaxRequestSizePerHandler(self): + if getattr(cherrypy.server, "using_apache", False): + return self.skip("skipped due to known Apache differences... ") + + self.getPage('/tinyupload', method="POST", + headers=[('Content-Type', 'text/plain'), + ('Content-Length', '100')], + body="x" * 100) + self.assertStatus(200) + self.assertBody("x" * 100) + + self.getPage('/tinyupload', method="POST", + headers=[('Content-Type', 'text/plain'), + ('Content-Length', '101')], + body="x" * 101) + self.assertStatus(413) + + def testMaxRequestSize(self): + if getattr(cherrypy.server, "using_apache", False): + return self.skip("skipped due to known Apache differences... ") + + for size in (500, 5000, 50000): + self.getPage("/", headers=[('From', "x" * 500)]) + self.assertStatus(413) + + # Test for https://github.com/cherrypy/cherrypy/issues/421 + # (Incorrect border condition in readline of SizeCheckWrapper). + # This hangs in rev 891 and earlier. 
+ lines256 = "x" * 248 + self.getPage("/", + headers=[('Host', '%s:%s' % (self.HOST, self.PORT)), + ('From', lines256)]) + + # Test upload + cd = ( + 'Content-Disposition: form-data; ' + 'name="file"; ' + 'filename="hello.txt"' + ) + body = '\r\n'.join([ + '--x', + cd, + 'Content-Type: text/plain', + '', + '%s', + '--x--']) + partlen = 200 - len(body) + b = body % ("x" * partlen) + h = [("Content-type", "multipart/form-data; boundary=x"), + ("Content-Length", "%s" % len(b))] + self.getPage('/upload', h, "POST", b) + self.assertBody('Size: %d' % partlen) + + b = body % ("x" * 200) + h = [("Content-type", "multipart/form-data; boundary=x"), + ("Content-Length", "%s" % len(b))] + self.getPage('/upload', h, "POST", b) + self.assertStatus(413) diff --git a/deps/cherrypy/test/test_conn.py b/deps/cherrypy/test/test_conn.py new file mode 100644 index 00000000..d3dc3f21 --- /dev/null +++ b/deps/cherrypy/test/test_conn.py @@ -0,0 +1,864 @@ +"""Tests for TCP connection handling, including proper and timely close.""" + +import socket +import sys +import time +import errno + +import six + +import cherrypy +from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, NotConnected +from cherrypy._cpcompat import ( + BadStatusLine, + ntob, + tonative, + urlopen, +) +from cherrypy.test import webtest + + +timeout = 1 +pov = 'pPeErRsSiIsStTeEnNcCeE oOfF vViIsSiIoOnN' + + +def setup_server(): + + def raise500(): + raise cherrypy.HTTPError(500) + + class Root: + + @cherrypy.expose + def index(self): + return pov + page1 = index + page2 = index + page3 = index + + @cherrypy.expose + def hello(self): + return "Hello, world!" 
    def test_HTTP11(self):
        """HTTP/1.1 connections persist by default and close only when
        the client sends "Connection: close".
        """
        if cherrypy.server.protocol_version != "HTTP/1.1":
            return self.skip()

        self.PROTOCOL = "HTTP/1.1"

        self.persistent = True

        # Make the first request and assert there's no "Connection: close".
        self.getPage("/")
        self.assertStatus('200 OK')
        self.assertBody(pov)
        self.assertNoHeader("Connection")

        # Make another request on the same connection.
        self.getPage("/page1")
        self.assertStatus('200 OK')
        self.assertBody(pov)
        self.assertNoHeader("Connection")

        # Test client-side close: the server must echo "Connection: close".
        self.getPage("/page2", headers=[("Connection", "close")])
        self.assertStatus('200 OK')
        self.assertBody(pov)
        self.assertHeader("Connection", "close")

        # Make another request on the same connection, which should error.
        self.assertRaises(NotConnected, self.getPage, "/")
+ self.getPage("/stream") + self.assertNoHeader("Content-Length") + self.assertStatus('200 OK') + self.assertBody('0123456789') + + chunked_response = False + for k, v in self.headers: + if k.lower() == "transfer-encoding": + if str(v) == "chunked": + chunked_response = True + + if chunked_response: + self.assertNoHeader("Connection", "close") + else: + self.assertHeader("Connection", "close") + + # Make another request on the same connection, which should + # error. + self.assertRaises(NotConnected, self.getPage, "/") + + # Try HEAD. See + # https://github.com/cherrypy/cherrypy/issues/864. + self.getPage("/stream", method='HEAD') + self.assertStatus('200 OK') + self.assertBody('') + self.assertNoHeader("Transfer-Encoding") + else: + self.PROTOCOL = "HTTP/1.0" + + self.persistent = True + + # Make the first request and assert Keep-Alive. + self.getPage("/", headers=[("Connection", "Keep-Alive")]) + self.assertStatus('200 OK') + self.assertBody(pov) + self.assertHeader("Connection", "Keep-Alive") + + # Make another, streamed request on the same connection. + if set_cl: + # When a Content-Length is provided, the content should + # stream without closing the connection. + self.getPage("/stream?set_cl=Yes", + headers=[("Connection", "Keep-Alive")]) + self.assertHeader("Content-Length") + self.assertHeader("Connection", "Keep-Alive") + self.assertNoHeader("Transfer-Encoding") + self.assertStatus('200 OK') + self.assertBody('0123456789') + else: + # When a Content-Length is not provided, + # the server should close the connection. + self.getPage("/stream", headers=[("Connection", "Keep-Alive")]) + self.assertStatus('200 OK') + self.assertBody('0123456789') + + self.assertNoHeader("Content-Length") + self.assertNoHeader("Connection", "Keep-Alive") + self.assertNoHeader("Transfer-Encoding") + + # Make another request on the same connection, which should + # error. 
+ self.assertRaises(NotConnected, self.getPage, "/") + + def test_HTTP10_KeepAlive(self): + self.PROTOCOL = "HTTP/1.0" + if self.scheme == "https": + self.HTTP_CONN = HTTPSConnection + else: + self.HTTP_CONN = HTTPConnection + + # Test a normal HTTP/1.0 request. + self.getPage("/page2") + self.assertStatus('200 OK') + self.assertBody(pov) + # Apache, for example, may emit a Connection header even for HTTP/1.0 +# self.assertNoHeader("Connection") + + # Test a keep-alive HTTP/1.0 request. + self.persistent = True + + self.getPage("/page3", headers=[("Connection", "Keep-Alive")]) + self.assertStatus('200 OK') + self.assertBody(pov) + self.assertHeader("Connection", "Keep-Alive") + + # Remove the keep-alive header again. + self.getPage("/page3") + self.assertStatus('200 OK') + self.assertBody(pov) + # Apache, for example, may emit a Connection header even for HTTP/1.0 +# self.assertNoHeader("Connection") + + +class PipelineTests(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def test_HTTP11_Timeout(self): + # If we timeout without sending any data, + # the server will close the conn with a 408. + if cherrypy.server.protocol_version != "HTTP/1.1": + return self.skip() + + self.PROTOCOL = "HTTP/1.1" + + # Connect but send nothing. + self.persistent = True + conn = self.HTTP_CONN + conn.auto_open = False + conn.connect() + + # Wait for our socket timeout + time.sleep(timeout * 2) + + # The request should have returned 408 already. + response = conn.response_class(conn.sock, method="GET") + response.begin() + self.assertEqual(response.status, 408) + conn.close() + + # Connect but send half the headers only. + self.persistent = True + conn = self.HTTP_CONN + conn.auto_open = False + conn.connect() + conn.send(ntob('GET /hello HTTP/1.1')) + conn.send(("Host: %s" % self.HOST).encode('ascii')) + + # Wait for our socket timeout + time.sleep(timeout * 2) + + # The conn should have already sent 408. 
+ response = conn.response_class(conn.sock, method="GET") + response.begin() + self.assertEqual(response.status, 408) + conn.close() + + def test_HTTP11_Timeout_after_request(self): + # If we timeout after at least one request has succeeded, + # the server will close the conn without 408. + if cherrypy.server.protocol_version != "HTTP/1.1": + return self.skip() + + self.PROTOCOL = "HTTP/1.1" + + # Make an initial request + self.persistent = True + conn = self.HTTP_CONN + conn.putrequest("GET", "/timeout?t=%s" % timeout, skip_host=True) + conn.putheader("Host", self.HOST) + conn.endheaders() + response = conn.response_class(conn.sock, method="GET") + response.begin() + self.assertEqual(response.status, 200) + self.body = response.read() + self.assertBody(str(timeout)) + + # Make a second request on the same socket + conn._output(ntob('GET /hello HTTP/1.1')) + conn._output(ntob("Host: %s" % self.HOST, 'ascii')) + conn._send_output() + response = conn.response_class(conn.sock, method="GET") + response.begin() + self.assertEqual(response.status, 200) + self.body = response.read() + self.assertBody("Hello, world!") + + # Wait for our socket timeout + time.sleep(timeout * 2) + + # Make another request on the same socket, which should error + conn._output(ntob('GET /hello HTTP/1.1')) + conn._output(ntob("Host: %s" % self.HOST, 'ascii')) + conn._send_output() + response = conn.response_class(conn.sock, method="GET") + try: + response.begin() + except: + if not isinstance(sys.exc_info()[1], + (socket.error, BadStatusLine)): + self.fail("Writing to timed out socket didn't fail" + " as it should have: %s" % sys.exc_info()[1]) + else: + if response.status != 408: + self.fail("Writing to timed out socket didn't fail" + " as it should have: %s" % + response.read()) + + conn.close() + + # Make another request on a new socket, which should work + self.persistent = True + conn = self.HTTP_CONN + conn.putrequest("GET", "/", skip_host=True) + conn.putheader("Host", self.HOST) + 
    def test_HTTP11_pipelining(self):
        """Pipeline several GETs on one connection and read the responses
        back in order. httplib has no native pipelining support, so the
        follow-up requests are written with its private
        _output/_send_output API.
        """
        if cherrypy.server.protocol_version != "HTTP/1.1":
            return self.skip()

        self.PROTOCOL = "HTTP/1.1"

        # Test pipelining. httplib doesn't support this directly.
        self.persistent = True
        conn = self.HTTP_CONN

        # Put request 1
        conn.putrequest("GET", "/hello", skip_host=True)
        conn.putheader("Host", self.HOST)
        conn.endheaders()

        for trial in range(5):
            # Put next request
            conn._output(ntob('GET /hello HTTP/1.1'))
            conn._output(ntob("Host: %s" % self.HOST, 'ascii'))
            conn._send_output()

            # Retrieve previous response
            response = conn.response_class(conn.sock, method="GET")
            # there is a bug in python3 regarding the buffering of
            # ``conn.sock``. Until that bug gets fixed we will
            # monkey patch the ``response`` instance.
            # https://bugs.python.org/issue23377
            if six.PY3:
                response.fp = conn.sock.makefile("rb", 0)
            response.begin()
            body = response.read(13)
            self.assertEqual(response.status, 200)
            self.assertEqual(body, ntob("Hello, world!"))

        # Retrieve final response
        response = conn.response_class(conn.sock, method="GET")
        response.begin()
        body = response.read()
        self.assertEqual(response.status, 200)
        self.assertEqual(body, ntob("Hello, world!"))

        conn.close()
Got %r" % + line) + else: + break + + # ...send the body + body = ntob("I am a small file") + conn.send(body) + + # ...get the final response + response.begin() + self.status, self.headers, self.body = webtest.shb(response) + self.assertStatus(200) + self.assertBody("thanks for '%s'" % body) + finally: + conn.close() + + +class ConnectionTests(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def test_readall_or_close(self): + if cherrypy.server.protocol_version != "HTTP/1.1": + return self.skip() + + self.PROTOCOL = "HTTP/1.1" + + if self.scheme == "https": + self.HTTP_CONN = HTTPSConnection + else: + self.HTTP_CONN = HTTPConnection + + # Test a max of 0 (the default) and then reset to what it was above. + old_max = cherrypy.server.max_request_body_size + for new_max in (0, old_max): + cherrypy.server.max_request_body_size = new_max + + self.persistent = True + conn = self.HTTP_CONN + + # Get a POST page with an error + conn.putrequest("POST", "/err_before_read", skip_host=True) + conn.putheader("Host", self.HOST) + conn.putheader("Content-Type", "text/plain") + conn.putheader("Content-Length", "1000") + conn.putheader("Expect", "100-continue") + conn.endheaders() + response = conn.response_class(conn.sock, method="POST") + + # ...assert and then skip the 100 response + version, status, reason = response._read_status() + self.assertEqual(status, 100) + while True: + skip = response.fp.readline().strip() + if not skip: + break + + # ...send the body + conn.send(ntob("x" * 1000)) + + # ...get the final response + response.begin() + self.status, self.headers, self.body = webtest.shb(response) + self.assertStatus(500) + + # Now try a working page with an Expect header... 
    def test_No_Message_Body(self):
        """204 and 304 responses must carry no body, no Content-Length,
        and must leave the persistent HTTP/1.1 connection open.
        """
        if cherrypy.server.protocol_version != "HTTP/1.1":
            return self.skip()

        self.PROTOCOL = "HTTP/1.1"

        # Set our HTTP_CONN to an instance so it persists between requests.
        self.persistent = True

        # Make the first request and assert there's no "Connection: close".
        self.getPage("/")
        self.assertStatus('200 OK')
        self.assertBody(pov)
        self.assertNoHeader("Connection")

        # Make a 204 request on the same connection.
        self.getPage("/custom/204")
        self.assertStatus(204)
        self.assertNoHeader("Content-Length")
        self.assertBody("")
        self.assertNoHeader("Connection")

        # Make a 304 request on the same connection.
        self.getPage("/custom/304")
        self.assertStatus(304)
        self.assertNoHeader("Content-Length")
        self.assertBody("")
        self.assertNoHeader("Connection")
+ self.persistent = True + conn = self.HTTP_CONN + + # Try a normal chunked request (with extensions) + body = ntob("8;key=value\r\nxx\r\nxxxx\r\n5\r\nyyyyy\r\n0\r\n" + "Content-Type: application/json\r\n" + "\r\n") + conn.putrequest("POST", "/upload", skip_host=True) + conn.putheader("Host", self.HOST) + conn.putheader("Transfer-Encoding", "chunked") + conn.putheader("Trailer", "Content-Type") + # Note that this is somewhat malformed: + # we shouldn't be sending Content-Length. + # RFC 2616 says the server should ignore it. + conn.putheader("Content-Length", "3") + conn.endheaders() + conn.send(body) + response = conn.getresponse() + self.status, self.headers, self.body = webtest.shb(response) + self.assertStatus('200 OK') + self.assertBody("thanks for '%s'" % ntob('xx\r\nxxxxyyyyy')) + + # Try a chunked request that exceeds server.max_request_body_size. + # Note that the delimiters and trailer are included. + body = ntob("3e3\r\n" + ("x" * 995) + "\r\n0\r\n\r\n") + conn.putrequest("POST", "/upload", skip_host=True) + conn.putheader("Host", self.HOST) + conn.putheader("Transfer-Encoding", "chunked") + conn.putheader("Content-Type", "text/plain") + # Chunked requests don't need a content-length +## conn.putheader("Content-Length", len(body)) + conn.endheaders() + conn.send(body) + response = conn.getresponse() + self.status, self.headers, self.body = webtest.shb(response) + self.assertStatus(413) + conn.close() + + def test_Content_Length_in(self): + # Try a non-chunked request where Content-Length exceeds + # server.max_request_body_size. Assert error before body send. 
+ self.persistent = True + conn = self.HTTP_CONN + conn.putrequest("POST", "/upload", skip_host=True) + conn.putheader("Host", self.HOST) + conn.putheader("Content-Type", "text/plain") + conn.putheader("Content-Length", "9999") + conn.endheaders() + response = conn.getresponse() + self.status, self.headers, self.body = webtest.shb(response) + self.assertStatus(413) + self.assertBody("The entity sent with the request exceeds " + "the maximum allowed bytes.") + conn.close() + + def test_Content_Length_out_preheaders(self): + # Try a non-chunked response where Content-Length is less than + # the actual bytes in the response body. + self.persistent = True + conn = self.HTTP_CONN + conn.putrequest("GET", "/custom_cl?body=I+have+too+many+bytes&cl=5", + skip_host=True) + conn.putheader("Host", self.HOST) + conn.endheaders() + response = conn.getresponse() + self.status, self.headers, self.body = webtest.shb(response) + self.assertStatus(500) + self.assertBody( + "The requested resource returned more bytes than the " + "declared Content-Length.") + conn.close() + + def test_Content_Length_out_postheaders(self): + # Try a non-chunked response where Content-Length is less than + # the actual bytes in the response body. 
# Platform-specific errno values signalling a connection reset by the peer
# (WSAECONNRESET exists on Windows only, ECONNRESET on POSIX).
reset_names = 'ECONNRESET', 'WSAECONNRESET'
socket_reset_errors = [
    code
    for code in (getattr(errno, err_name, None) for err_name in reset_names)
    if code is not None
]
"reset error numbers available on this platform"

# Python 3.5 raises an http.client.RemoteDisconnected with this message
# instead of a socket error carrying an errno, so match on the text too.
socket_reset_errors.append(
    "Remote end closed connection without response",
)
WorkerThreads and fill its Queue. + for i in range(15): + conn = self.HTTP_CONN(self.HOST, self.PORT) + conn.putrequest("POST", "/upload", skip_host=True) + conn.putheader("Host", self.HOST) + conn.putheader("Content-Type", "text/plain") + conn.putheader("Content-Length", "4") + conn.endheaders() + conns.append(conn) + + # Now try a 16th conn, which should be closed by the server immediately. + overflow_conn = self.HTTP_CONN(self.HOST, self.PORT) + # Manually connect since httplib won't let us set a timeout + for res in socket.getaddrinfo(self.HOST, self.PORT, 0, + socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + overflow_conn.sock = socket.socket(af, socktype, proto) + overflow_conn.sock.settimeout(5) + overflow_conn.sock.connect(sa) + break + + overflow_conn.putrequest("GET", "/", skip_host=True) + overflow_conn.putheader("Host", self.HOST) + overflow_conn.endheaders() + response = overflow_conn.response_class(overflow_conn.sock, method="GET") + try: + response.begin() + except socket.error as exc: + if exc.args[0] in socket_reset_errors: + pass # Expected. + else: + tmpl = ( + "Overflow conn did not get RST. " + "Got {exc.args!r} instead" + ) + raise AssertionError(tmpl.format(**locals())) + except BadStatusLine: + # This is a special case in OS X. Linux and Windows will + # RST correctly. 
    def test_No_CRLF(self):
        """Request lines terminated with a bare LF must be rejected."""
        self.persistent = True

        conn = self.HTTP_CONN
        conn.send(ntob('GET /hello HTTP/1.1\n\n'))
        response = conn.response_class(conn.sock, method="GET")
        response.begin()
        self.body = response.read()
        self.assertBody("HTTP requires CRLF terminators")
        conn.close()

        # A proper CRLF after the request line but a bare LF ending the
        # header block is rejected too.
        conn.connect()
        conn.send(ntob('GET /hello HTTP/1.1\r\n\n'))
        response = conn.response_class(conn.sock, method="GET")
        response.begin()
        self.body = response.read()
        self.assertBody("HTTP requires CRLF terminators")
        conn.close()
newct): + newct = "text/%s" % newct + cherrypy.config.update({'tools.response_headers.on': True, + 'tools.response_headers.headers': + [('Content-Type', newct)]}) + + @cherrypy.expose + def baseurl(self, path_info, relative=None): + return cherrypy.url(path_info, relative=bool(relative)) + + root = Root() + root.expose_dec = ExposeExamples() + + class TestType(type): + + """Metaclass which automatically exposes all functions in each + subclass, and adds an instance of the subclass as an attribute + of root. + """ + def __init__(cls, name, bases, dct): + type.__init__(cls, name, bases, dct) + for value in itervalues(dct): + if isinstance(value, types.FunctionType): + value.exposed = True + setattr(root, name.lower(), cls()) + Test = TestType('Test', (object, ), {}) + + @cherrypy.config(**{'tools.trailing_slash.on': False}) + class URL(Test): + + def index(self, path_info, relative=None): + if relative != 'server': + relative = bool(relative) + return cherrypy.url(path_info, relative=relative) + + def leaf(self, path_info, relative=None): + if relative != 'server': + relative = bool(relative) + return cherrypy.url(path_info, relative=relative) + + def log_status(): + Status.statuses.append(cherrypy.response.status) + cherrypy.tools.log_status = cherrypy.Tool( + 'on_end_resource', log_status) + + class Status(Test): + + def index(self): + return "normal" + + def blank(self): + cherrypy.response.status = "" + + # According to RFC 2616, new status codes are OK as long as they + # are between 100 and 599. + + # Here is an illegal code... + def illegal(self): + cherrypy.response.status = 781 + return "oops" + + # ...and here is an unknown but legal code. 
+ def unknown(self): + cherrypy.response.status = "431 My custom error" + return "funky" + + # Non-numeric code + def bad(self): + cherrypy.response.status = "error" + return "bad news" + + statuses = [] + + @cherrypy.config(**{'tools.log_status.on': True}) + def on_end_resource_stage(self): + return repr(self.statuses) + + class Redirect(Test): + + @cherrypy.config(**{ + "tools.err_redirect.on": True, + "tools.err_redirect.url": "/errpage", + "tools.err_redirect.internal": False, + }) + class Error: + @cherrypy.expose + def index(self): + raise NameError("redirect_test") + + error = Error() + + def index(self): + return "child" + + def custom(self, url, code): + raise cherrypy.HTTPRedirect(url, code) + + @cherrypy.config(**{'tools.trailing_slash.extra': True}) + def by_code(self, code): + raise cherrypy.HTTPRedirect("somewhere%20else", code) + + def nomodify(self): + raise cherrypy.HTTPRedirect("", 304) + + def proxy(self): + raise cherrypy.HTTPRedirect("proxy", 305) + + def stringify(self): + return str(cherrypy.HTTPRedirect("/")) + + def fragment(self, frag): + raise cherrypy.HTTPRedirect("/some/url#%s" % frag) + + def url_with_quote(self): + raise cherrypy.HTTPRedirect("/some\"url/that'we/want") + + def url_with_unicode(self): + raise cherrypy.HTTPRedirect(ntou("тест", "utf-8")) + + def login_redir(): + if not getattr(cherrypy.request, "login", None): + raise cherrypy.InternalRedirect("/internalredirect/login") + tools.login_redir = _cptools.Tool('before_handler', login_redir) + + def redir_custom(): + raise cherrypy.InternalRedirect("/internalredirect/custom_err") + + class InternalRedirect(Test): + + def index(self): + raise cherrypy.InternalRedirect("/") + + @cherrypy.expose + @cherrypy.config(**{'hooks.before_error_response': redir_custom}) + def choke(self): + return 3 / 0 + + def relative(self, a, b): + raise cherrypy.InternalRedirect("cousin?t=6") + + def cousin(self, t): + assert cherrypy.request.prev.closed + return cherrypy.request.prev.query_string + 
+ def petshop(self, user_id): + if user_id == "parrot": + # Trade it for a slug when redirecting + raise cherrypy.InternalRedirect( + '/image/getImagesByUser?user_id=slug') + elif user_id == "terrier": + # Trade it for a fish when redirecting + raise cherrypy.InternalRedirect( + '/image/getImagesByUser?user_id=fish') + else: + # This should pass the user_id through to getImagesByUser + raise cherrypy.InternalRedirect( + '/image/getImagesByUser?user_id=%s' % str(user_id)) + + # We support Python 2.3, but the @-deco syntax would look like + # this: + # @tools.login_redir() + def secure(self): + return "Welcome!" + secure = tools.login_redir()(secure) + # Since calling the tool returns the same function you pass in, + # you could skip binding the return value, and just write: + # tools.login_redir()(secure) + + def login(self): + return "Please log in" + + def custom_err(self): + return "Something went horribly wrong." + + @cherrypy.config(**{'hooks.before_request_body': redir_custom}) + def early_ir(self, arg): + return "whatever" + + class Image(Test): + + def getImagesByUser(self, user_id): + return "0 images for %s" % user_id + + class Flatten(Test): + + def as_string(self): + return "content" + + def as_list(self): + return ["con", "tent"] + + def as_yield(self): + yield ntob("content") + + @cherrypy.config(**{'tools.flatten.on': True}) + def as_dblyield(self): + yield self.as_yield() + + def as_refyield(self): + for chunk in self.as_yield(): + yield chunk + + class Ranges(Test): + + def get_ranges(self, bytes): + return repr(httputil.get_ranges('bytes=%s' % bytes, 8)) + + def slice_file(self): + path = os.path.join(os.getcwd(), os.path.dirname(__file__)) + return static.serve_file( + os.path.join(path, "static/index.html")) + + class Cookies(Test): + + def single(self, name): + cookie = cherrypy.request.cookie[name] + # Python2's SimpleCookie.__setitem__ won't take unicode keys. 
+ cherrypy.response.cookie[str(name)] = cookie.value + + def multiple(self, names): + list(map(self.single, names)) + + def append_headers(header_list, debug=False): + if debug: + cherrypy.log( + "Extending response headers with %s" % repr(header_list), + "TOOLS.APPEND_HEADERS") + cherrypy.serving.response.header_list.extend(header_list) + cherrypy.tools.append_headers = cherrypy.Tool( + 'on_end_resource', append_headers) + + class MultiHeader(Test): + + def header_list(self): + pass + header_list = cherrypy.tools.append_headers(header_list=[ + (ntob('WWW-Authenticate'), ntob('Negotiate')), + (ntob('WWW-Authenticate'), ntob('Basic realm="foo"')), + ])(header_list) + + def commas(self): + cherrypy.response.headers[ + 'WWW-Authenticate'] = 'Negotiate,Basic realm="foo"' + + cherrypy.tree.mount(root) + + def testStatus(self): + self.getPage("/status/") + self.assertBody('normal') + self.assertStatus(200) + + self.getPage("/status/blank") + self.assertBody('') + self.assertStatus(200) + + self.getPage("/status/illegal") + self.assertStatus(500) + msg = "Illegal response status from server (781 is out of range)." + self.assertErrorPage(500, msg) + + if not getattr(cherrypy.server, 'using_apache', False): + self.getPage("/status/unknown") + self.assertBody('funky') + self.assertStatus(431) + + self.getPage("/status/bad") + self.assertStatus(500) + msg = "Illegal response status from server ('error' is non-numeric)." + self.assertErrorPage(500, msg) + + def test_on_end_resource_status(self): + self.getPage('/status/on_end_resource_stage') + self.assertBody('[]') + self.getPage('/status/on_end_resource_stage') + self.assertBody(repr(["200 OK"])) + + def testSlashes(self): + # Test that requests for index methods without a trailing slash + # get redirected to the same URI path with a trailing slash. + # Make sure GET params are preserved. 
+ self.getPage("/redirect?id=3") + self.assertStatus(301) + self.assertMatchesBody('' + "%s/redirect/[?]id=3" % (self.base(), self.base())) + + if self.prefix(): + # Corner case: the "trailing slash" redirect could be tricky if + # we're using a virtual root and the URI is "/vroot" (no slash). + self.getPage("") + self.assertStatus(301) + self.assertMatchesBody("%s/" % + (self.base(), self.base())) + + # Test that requests for NON-index methods WITH a trailing slash + # get redirected to the same URI path WITHOUT a trailing slash. + # Make sure GET params are preserved. + self.getPage("/redirect/by_code/?code=307") + self.assertStatus(301) + self.assertMatchesBody("" + "%s/redirect/by_code[?]code=307" + % (self.base(), self.base())) + + # If the trailing_slash tool is off, CP should just continue + # as if the slashes were correct. But it needs some help + # inside cherrypy.url to form correct output. + self.getPage('/url?path_info=page1') + self.assertBody('%s/url/page1' % self.base()) + self.getPage('/url/leaf/?path_info=page1') + self.assertBody('%s/url/page1' % self.base()) + + def testRedirect(self): + self.getPage("/redirect/") + self.assertBody('child') + self.assertStatus(200) + + self.getPage("/redirect/by_code?code=300") + self.assertMatchesBody( + r"\2somewhere%20else") + self.assertStatus(300) + + self.getPage("/redirect/by_code?code=301") + self.assertMatchesBody( + r"\2somewhere%20else") + self.assertStatus(301) + + self.getPage("/redirect/by_code?code=302") + self.assertMatchesBody( + r"\2somewhere%20else") + self.assertStatus(302) + + self.getPage("/redirect/by_code?code=303") + self.assertMatchesBody( + r"\2somewhere%20else") + self.assertStatus(303) + + self.getPage("/redirect/by_code?code=307") + self.assertMatchesBody( + r"\2somewhere%20else") + self.assertStatus(307) + + self.getPage("/redirect/nomodify") + self.assertBody('') + self.assertStatus(304) + + self.getPage("/redirect/proxy") + self.assertBody('') + self.assertStatus(305) + + # 
HTTPRedirect on error + self.getPage("/redirect/error/") + self.assertStatus(('302 Found', '303 See Other')) + self.assertInBody('/errpage') + + # Make sure str(HTTPRedirect()) works. + self.getPage("/redirect/stringify", protocol="HTTP/1.0") + self.assertStatus(200) + self.assertBody("(['%s/'], 302)" % self.base()) + if cherrypy.server.protocol_version == "HTTP/1.1": + self.getPage("/redirect/stringify", protocol="HTTP/1.1") + self.assertStatus(200) + self.assertBody("(['%s/'], 303)" % self.base()) + + # check that #fragments are handled properly + # http://skrb.org/ietf/http_errata.html#location-fragments + frag = "foo" + self.getPage("/redirect/fragment/%s" % frag) + self.assertMatchesBody( + r"\2\/some\/url\#%s" % ( + frag, frag)) + loc = self.assertHeader('Location') + assert loc.endswith("#%s" % frag) + self.assertStatus(('302 Found', '303 See Other')) + + # check injection protection + # See https://github.com/cherrypy/cherrypy/issues/1003 + self.getPage( + "/redirect/custom?" + "code=303&url=/foobar/%0d%0aSet-Cookie:%20somecookie=someval") + self.assertStatus(303) + loc = self.assertHeader('Location') + assert 'Set-Cookie' in loc + self.assertNoHeader('Set-Cookie') + + def assertValidXHTML(): + from xml.etree import ElementTree + try: + ElementTree.fromstring('%s' % self.body) + except ElementTree.ParseError as e: + self._handlewebError('automatically generated redirect ' + 'did not generate well-formed html') + + # check redirects to URLs generated valid HTML - we check this + # by seeing if it appears as valid XHTML. + self.getPage("/redirect/by_code?code=303") + self.assertStatus(303) + assertValidXHTML() + + # do the same with a url containing quote characters. + self.getPage("/redirect/url_with_quote") + self.assertStatus(303) + assertValidXHTML() + + def test_redirect_with_unicode(self): + """ + A redirect to a URL with Unicode should return a Location + header containing that Unicode URL. 
+ """ + # test disabled due to #1440 + return + self.getPage("/redirect/url_with_unicode") + self.assertStatus(303) + loc = self.assertHeader('Location') + assert ntou('тест', encoding='utf-8') in loc + + def test_InternalRedirect(self): + # InternalRedirect + self.getPage("/internalredirect/") + self.assertBody('hello') + self.assertStatus(200) + + # Test passthrough + self.getPage( + "/internalredirect/petshop?user_id=Sir-not-appearing-in-this-film") + self.assertBody('0 images for Sir-not-appearing-in-this-film') + self.assertStatus(200) + + # Test args + self.getPage("/internalredirect/petshop?user_id=parrot") + self.assertBody('0 images for slug') + self.assertStatus(200) + + # Test POST + self.getPage("/internalredirect/petshop", method="POST", + body="user_id=terrier") + self.assertBody('0 images for fish') + self.assertStatus(200) + + # Test ir before body read + self.getPage("/internalredirect/early_ir", method="POST", + body="arg=aha!") + self.assertBody("Something went horribly wrong.") + self.assertStatus(200) + + self.getPage("/internalredirect/secure") + self.assertBody('Please log in') + self.assertStatus(200) + + # Relative path in InternalRedirect. + # Also tests request.prev. + self.getPage("/internalredirect/relative?a=3&b=5") + self.assertBody("a=3&b=5") + self.assertStatus(200) + + # InternalRedirect on error + self.getPage("/internalredirect/choke") + self.assertStatus(200) + self.assertBody("Something went horribly wrong.") + + def testFlatten(self): + for url in ["/flatten/as_string", "/flatten/as_list", + "/flatten/as_yield", "/flatten/as_dblyield", + "/flatten/as_refyield"]: + self.getPage(url) + self.assertBody('content') + + def testRanges(self): + self.getPage("/ranges/get_ranges?bytes=3-6") + self.assertBody("[(3, 7)]") + + # Test multiple ranges and a suffix-byte-range-spec, for good measure. 
+ self.getPage("/ranges/get_ranges?bytes=2-4,-1") + self.assertBody("[(2, 5), (7, 8)]") + + # Test a suffix-byte-range longer than the content + # length. Note that in this test, the content length + # is 8 bytes. + self.getPage("/ranges/get_ranges?bytes=-100") + self.assertBody("[(0, 8)]") + + # Get a partial file. + if cherrypy.server.protocol_version == "HTTP/1.1": + self.getPage("/ranges/slice_file", [('Range', 'bytes=2-5')]) + self.assertStatus(206) + self.assertHeader("Content-Type", "text/html;charset=utf-8") + self.assertHeader("Content-Range", "bytes 2-5/14") + self.assertBody("llo,") + + # What happens with overlapping ranges (and out of order, too)? + self.getPage("/ranges/slice_file", [('Range', 'bytes=4-6,2-5')]) + self.assertStatus(206) + ct = self.assertHeader("Content-Type") + expected_type = "multipart/byteranges; boundary=" + self.assert_(ct.startswith(expected_type)) + boundary = ct[len(expected_type):] + expected_body = ("\r\n--%s\r\n" + "Content-type: text/html\r\n" + "Content-range: bytes 4-6/14\r\n" + "\r\n" + "o, \r\n" + "--%s\r\n" + "Content-type: text/html\r\n" + "Content-range: bytes 2-5/14\r\n" + "\r\n" + "llo,\r\n" + "--%s--\r\n" % (boundary, boundary, boundary)) + self.assertBody(expected_body) + self.assertHeader("Content-Length") + + # Test "416 Requested Range Not Satisfiable" + self.getPage("/ranges/slice_file", [('Range', 'bytes=2300-2900')]) + self.assertStatus(416) + # "When this status code is returned for a byte-range request, + # the response SHOULD include a Content-Range entity-header + # field specifying the current length of the selected resource" + self.assertHeader("Content-Range", "bytes */14") + elif cherrypy.server.protocol_version == "HTTP/1.0": + # Test Range behavior with HTTP/1.0 request + self.getPage("/ranges/slice_file", [('Range', 'bytes=2-5')]) + self.assertStatus(200) + self.assertBody("Hello, world\r\n") + + def testFavicon(self): + # favicon.ico is served by staticfile. 
+ icofilename = os.path.join(localDir, "../favicon.ico") + icofile = open(icofilename, "rb") + data = icofile.read() + icofile.close() + + self.getPage("/favicon.ico") + self.assertBody(data) + + def skip_if_bad_cookies(self): + """ + cookies module fails to reject invalid cookies + https://github.com/cherrypy/cherrypy/issues/1405 + """ + cookies = sys.modules.get('http.cookies') + _is_legal_key = getattr(cookies, '_is_legal_key', lambda x: False) + if not _is_legal_key(','): + return + issue = 'http://bugs.python.org/issue26302' + tmpl = "Broken cookies module ({issue})" + self.skip(tmpl.format(**locals())) + + def testCookies(self): + self.skip_if_bad_cookies() + + self.getPage("/cookies/single?name=First", + [('Cookie', 'First=Dinsdale;')]) + self.assertHeader('Set-Cookie', 'First=Dinsdale') + + self.getPage("/cookies/multiple?names=First&names=Last", + [('Cookie', 'First=Dinsdale; Last=Piranha;'), + ]) + self.assertHeader('Set-Cookie', 'First=Dinsdale') + self.assertHeader('Set-Cookie', 'Last=Piranha') + + self.getPage("/cookies/single?name=Something-With%2CComma", + [('Cookie', 'Something-With,Comma=some-value')]) + self.assertStatus(400) + + def testDefaultContentType(self): + self.getPage('/') + self.assertHeader('Content-Type', 'text/html;charset=utf-8') + self.getPage('/defct/plain') + self.getPage('/') + self.assertHeader('Content-Type', 'text/plain;charset=utf-8') + self.getPage('/defct/html') + + def test_multiple_headers(self): + self.getPage('/multiheader/header_list') + self.assertEqual( + [(k, v) for k, v in self.headers if k == 'WWW-Authenticate'], + [('WWW-Authenticate', 'Negotiate'), + ('WWW-Authenticate', 'Basic realm="foo"'), + ]) + self.getPage('/multiheader/commas') + self.assertHeader('WWW-Authenticate', 'Negotiate,Basic realm="foo"') + + def test_cherrypy_url(self): + # Input relative to current + self.getPage('/url/leaf?path_info=page1') + self.assertBody('%s/url/page1' % self.base()) + self.getPage('/url/?path_info=page1') + 
self.assertBody('%s/url/page1' % self.base()) + # Other host header + host = 'www.mydomain.example' + self.getPage('/url/leaf?path_info=page1', + headers=[('Host', host)]) + self.assertBody('%s://%s/url/page1' % (self.scheme, host)) + + # Input is 'absolute'; that is, relative to script_name + self.getPage('/url/leaf?path_info=/page1') + self.assertBody('%s/page1' % self.base()) + self.getPage('/url/?path_info=/page1') + self.assertBody('%s/page1' % self.base()) + + # Single dots + self.getPage('/url/leaf?path_info=./page1') + self.assertBody('%s/url/page1' % self.base()) + self.getPage('/url/leaf?path_info=other/./page1') + self.assertBody('%s/url/other/page1' % self.base()) + self.getPage('/url/?path_info=/other/./page1') + self.assertBody('%s/other/page1' % self.base()) + + # Double dots + self.getPage('/url/leaf?path_info=../page1') + self.assertBody('%s/page1' % self.base()) + self.getPage('/url/leaf?path_info=other/../page1') + self.assertBody('%s/url/page1' % self.base()) + self.getPage('/url/leaf?path_info=/other/../page1') + self.assertBody('%s/page1' % self.base()) + + # Output relative to current path or script_name + self.getPage('/url/?path_info=page1&relative=True') + self.assertBody('page1') + self.getPage('/url/leaf?path_info=/page1&relative=True') + self.assertBody('../page1') + self.getPage('/url/leaf?path_info=page1&relative=True') + self.assertBody('page1') + self.getPage('/url/leaf?path_info=leaf/page1&relative=True') + self.assertBody('leaf/page1') + self.getPage('/url/leaf?path_info=../page1&relative=True') + self.assertBody('../page1') + self.getPage('/url/?path_info=other/../page1&relative=True') + self.assertBody('page1') + + # Output relative to / + self.getPage('/baseurl?path_info=ab&relative=True') + self.assertBody('ab') + # Output relative to / + self.getPage('/baseurl?path_info=/ab&relative=True') + self.assertBody('ab') + + # absolute-path references ("server-relative") + # Input relative to current + 
self.getPage('/url/leaf?path_info=page1&relative=server') + self.assertBody('/url/page1') + self.getPage('/url/?path_info=page1&relative=server') + self.assertBody('/url/page1') + # Input is 'absolute'; that is, relative to script_name + self.getPage('/url/leaf?path_info=/page1&relative=server') + self.assertBody('/page1') + self.getPage('/url/?path_info=/page1&relative=server') + self.assertBody('/page1') + + def test_expose_decorator(self): + # Test @expose + self.getPage("/expose_dec/no_call") + self.assertStatus(200) + self.assertBody("Mr E. R. Bradshaw") + + # Test @expose() + self.getPage("/expose_dec/call_empty") + self.assertStatus(200) + self.assertBody("Mrs. B.J. Smegma") + + # Test @expose("alias") + self.getPage("/expose_dec/call_alias") + self.assertStatus(200) + self.assertBody("Mr Nesbitt") + # Does the original name work? + self.getPage("/expose_dec/nesbitt") + self.assertStatus(200) + self.assertBody("Mr Nesbitt") + + # Test @expose(["alias1", "alias2"]) + self.getPage("/expose_dec/alias1") + self.assertStatus(200) + self.assertBody("Mr Ken Andrews") + self.getPage("/expose_dec/alias2") + self.assertStatus(200) + self.assertBody("Mr Ken Andrews") + # Does the original name work? + self.getPage("/expose_dec/andrews") + self.assertStatus(200) + self.assertBody("Mr Ken Andrews") + + # Test @expose(alias="alias") + self.getPage("/expose_dec/alias3") + self.assertStatus(200) + self.assertBody("Mr. and Mrs. Watson") + + +class ErrorTests(helper.CPWebCase): + + @staticmethod + def setup_server(): + def break_header(): + # Add a header after finalize that is invalid + cherrypy.serving.response.header_list.append((2, 3)) + cherrypy.tools.break_header = cherrypy.Tool( + 'on_end_resource', break_header) + + class Root: + + @cherrypy.expose + def index(self): + return "hello" + + @cherrypy.config(**{'tools.break_header.on': True}) + def start_response_error(self): + return "salud!" 
+ + @cherrypy.expose + def stat(self, path): + with cherrypy.HTTPError.handle(OSError, 404): + st = os.stat(path) + + root = Root() + + cherrypy.tree.mount(root) + + def test_start_response_error(self): + self.getPage("/start_response_error") + self.assertStatus(500) + self.assertInBody( + "TypeError: response.header_list key 2 is not a byte string.") + + def test_contextmanager(self): + self.getPage("/stat/missing") + self.assertStatus(404) + self.assertInBody("No such file or directory") diff --git a/deps/cherrypy/test/test_dynamicobjectmapping.py b/deps/cherrypy/test/test_dynamicobjectmapping.py new file mode 100644 index 00000000..331c3f2e --- /dev/null +++ b/deps/cherrypy/test/test_dynamicobjectmapping.py @@ -0,0 +1,424 @@ +import six + +import cherrypy +from cherrypy._cpcompat import sorted +from cherrypy.test import helper + +script_names = ["", "/foo", "/users/fred/blog", "/corp/blog"] + + +def setup_server(): + class SubSubRoot: + + @cherrypy.expose + def index(self): + return "SubSubRoot index" + + @cherrypy.expose + def default(self, *args): + return "SubSubRoot default" + + @cherrypy.expose + def handler(self): + return "SubSubRoot handler" + + @cherrypy.expose + def dispatch(self): + return "SubSubRoot dispatch" + + subsubnodes = { + '1': SubSubRoot(), + '2': SubSubRoot(), + } + + class SubRoot: + + @cherrypy.expose + def index(self): + return "SubRoot index" + + @cherrypy.expose + def default(self, *args): + return "SubRoot %s" % (args,) + + @cherrypy.expose + def handler(self): + return "SubRoot handler" + + def _cp_dispatch(self, vpath): + return subsubnodes.get(vpath[0], None) + + subnodes = { + '1': SubRoot(), + '2': SubRoot(), + } + + class Root: + + @cherrypy.expose + def index(self): + return "index" + + @cherrypy.expose + def default(self, *args): + return "default %s" % (args,) + + @cherrypy.expose + def handler(self): + return "handler" + + def _cp_dispatch(self, vpath): + return subnodes.get(vpath[0]) + + 
#-------------------------------------------------------------------------- + # DynamicNodeAndMethodDispatcher example. + # This example exposes a fairly naive HTTP api + class User(object): + + def __init__(self, id, name): + self.id = id + self.name = name + + def __unicode__(self): + return unicode(self.name) + + def __str__(self): + return str(self.name) + + user_lookup = { + 1: User(1, 'foo'), + 2: User(2, 'bar'), + } + + def make_user(name, id=None): + if not id: + id = max(*list(user_lookup.keys())) + 1 + user_lookup[id] = User(id, name) + return id + + @cherrypy.expose + class UserContainerNode(object): + + def POST(self, name): + """ + Allow the creation of a new Object + """ + return "POST %d" % make_user(name) + + def GET(self): + return six.text_type(sorted(user_lookup.keys())) + + def dynamic_dispatch(self, vpath): + try: + id = int(vpath[0]) + except (ValueError, IndexError): + return None + return UserInstanceNode(id) + + @cherrypy.expose + class UserInstanceNode(object): + + def __init__(self, id): + self.id = id + self.user = user_lookup.get(id, None) + + # For all but PUT methods there MUST be a valid user identified + # by self.id + if not self.user and cherrypy.request.method != 'PUT': + raise cherrypy.HTTPError(404) + + def GET(self, *args, **kwargs): + """ + Return the appropriate representation of the instance. + """ + return six.text_type(self.user) + + def POST(self, name): + """ + Update the fields of the user instance. + """ + self.user.name = name + return "POST %d" % self.user.id + + def PUT(self, name): + """ + Create a new user with the specified id, or edit it if it already + exists + """ + if self.user: + # Edit the current user + self.user.name = name + return "PUT %d" % self.user.id + else: + # Make a new user with said attributes. + return "PUT %d" % make_user(name, self.id) + + def DELETE(self): + """ + Delete the user specified at the id. 
+ """ + id = self.user.id + del user_lookup[self.user.id] + del self.user + return "DELETE %d" % id + + class ABHandler: + + class CustomDispatch: + + @cherrypy.expose + def index(self, a, b): + return "custom" + + def _cp_dispatch(self, vpath): + """Make sure that if we don't pop anything from vpath, + processing still works. + """ + return self.CustomDispatch() + + @cherrypy.expose + def index(self, a, b=None): + body = ['a:' + str(a)] + if b is not None: + body.append(',b:' + str(b)) + return ''.join(body) + + @cherrypy.expose + def delete(self, a, b): + return 'deleting ' + str(a) + ' and ' + str(b) + + class IndexOnly: + + def _cp_dispatch(self, vpath): + """Make sure that popping ALL of vpath still shows the index + handler. + """ + while vpath: + vpath.pop() + return self + + @cherrypy.expose + def index(self): + return "IndexOnly index" + + class DecoratedPopArgs: + + """Test _cp_dispatch with @cherrypy.popargs.""" + + @cherrypy.expose + def index(self): + return "no params" + + @cherrypy.expose + def hi(self): + return "hi was not interpreted as 'a' param" + DecoratedPopArgs = cherrypy.popargs( + 'a', 'b', handler=ABHandler())(DecoratedPopArgs) + + class NonDecoratedPopArgs: + + """Test _cp_dispatch = cherrypy.popargs()""" + + _cp_dispatch = cherrypy.popargs('a') + + @cherrypy.expose + def index(self, a): + return "index: " + str(a) + + class ParameterizedHandler: + + """Special handler created for each request""" + + def __init__(self, a): + self.a = a + + @cherrypy.expose + def index(self): + if 'a' in cherrypy.request.params: + raise Exception( + "Parameterized handler argument ended up in " + "request.params") + return self.a + + class ParameterizedPopArgs: + + """Test cherrypy.popargs() with a function call handler""" + ParameterizedPopArgs = cherrypy.popargs( + 'a', handler=ParameterizedHandler)(ParameterizedPopArgs) + + Root.decorated = DecoratedPopArgs() + Root.undecorated = NonDecoratedPopArgs() + Root.index_only = IndexOnly() + 
Root.parameter_test = ParameterizedPopArgs() + + Root.users = UserContainerNode() + + md = cherrypy.dispatch.MethodDispatcher('dynamic_dispatch') + for url in script_names: + conf = {'/': { + 'user': (url or "/").split("/")[-2], + }, + '/users': { + 'request.dispatch': md + }, + } + cherrypy.tree.mount(Root(), url, conf) + + +class DynamicObjectMappingTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def testObjectMapping(self): + for url in script_names: + prefix = self.script_name = url + + self.getPage('/') + self.assertBody('index') + + self.getPage('/handler') + self.assertBody('handler') + + # Dynamic dispatch will succeed here for the subnodes + # so the subroot gets called + self.getPage('/1/') + self.assertBody('SubRoot index') + + self.getPage('/2/') + self.assertBody('SubRoot index') + + self.getPage('/1/handler') + self.assertBody('SubRoot handler') + + self.getPage('/2/handler') + self.assertBody('SubRoot handler') + + # Dynamic dispatch will fail here for the subnodes + # so the default gets called + self.getPage('/asdf/') + self.assertBody("default ('asdf',)") + + self.getPage('/asdf/asdf') + self.assertBody("default ('asdf', 'asdf')") + + self.getPage('/asdf/handler') + self.assertBody("default ('asdf', 'handler')") + + # Dynamic dispatch will succeed here for the subsubnodes + # so the subsubroot gets called + self.getPage('/1/1/') + self.assertBody('SubSubRoot index') + + self.getPage('/2/2/') + self.assertBody('SubSubRoot index') + + self.getPage('/1/1/handler') + self.assertBody('SubSubRoot handler') + + self.getPage('/2/2/handler') + self.assertBody('SubSubRoot handler') + + self.getPage('/2/2/dispatch') + self.assertBody('SubSubRoot dispatch') + + # The exposed dispatch will not be called as a dispatch + # method. 
+ self.getPage('/2/2/foo/foo') + self.assertBody("SubSubRoot default") + + # Dynamic dispatch will fail here for the subsubnodes + # so the SubRoot gets called + self.getPage('/1/asdf/') + self.assertBody("SubRoot ('asdf',)") + + self.getPage('/1/asdf/asdf') + self.assertBody("SubRoot ('asdf', 'asdf')") + + self.getPage('/1/asdf/handler') + self.assertBody("SubRoot ('asdf', 'handler')") + + def testMethodDispatch(self): + # GET acts like a container + self.getPage("/users") + self.assertBody("[1, 2]") + self.assertHeader('Allow', 'GET, HEAD, POST') + + # POST to the container URI allows creation + self.getPage("/users", method="POST", body="name=baz") + self.assertBody("POST 3") + self.assertHeader('Allow', 'GET, HEAD, POST') + + # POST to a specific instance URI results in a 404 + # as the resource does not exist. + self.getPage("/users/5", method="POST", body="name=baz") + self.assertStatus(404) + + # PUT to a specific instance URI results in creation + self.getPage("/users/5", method="PUT", body="name=boris") + self.assertBody("PUT 5") + self.assertHeader('Allow', 'DELETE, GET, HEAD, POST, PUT') + + # GET acts like a container + self.getPage("/users") + self.assertBody("[1, 2, 3, 5]") + self.assertHeader('Allow', 'GET, HEAD, POST') + + test_cases = ( + (1, 'foo', 'fooupdated', 'DELETE, GET, HEAD, POST, PUT'), + (2, 'bar', 'barupdated', 'DELETE, GET, HEAD, POST, PUT'), + (3, 'baz', 'bazupdated', 'DELETE, GET, HEAD, POST, PUT'), + (5, 'boris', 'borisupdated', 'DELETE, GET, HEAD, POST, PUT'), + ) + for id, name, updatedname, headers in test_cases: + self.getPage("/users/%d" % id) + self.assertBody(name) + self.assertHeader('Allow', headers) + + # Make sure POSTs update already existing resources + self.getPage("/users/%d" % + id, method='POST', body="name=%s" % updatedname) + self.assertBody("POST %d" % id) + self.assertHeader('Allow', headers) + + # Make sure PUTs Update already existing resources. 
+ self.getPage("/users/%d" % + id, method='PUT', body="name=%s" % updatedname) + self.assertBody("PUT %d" % id) + self.assertHeader('Allow', headers) + + # Make sure DELETES Remove already existing resources. + self.getPage("/users/%d" % id, method='DELETE') + self.assertBody("DELETE %d" % id) + self.assertHeader('Allow', headers) + + # GET acts like a container + self.getPage("/users") + self.assertBody("[]") + self.assertHeader('Allow', 'GET, HEAD, POST') + + def testVpathDispatch(self): + self.getPage("/decorated/") + self.assertBody("no params") + + self.getPage("/decorated/hi") + self.assertBody("hi was not interpreted as 'a' param") + + self.getPage("/decorated/yo/") + self.assertBody("a:yo") + + self.getPage("/decorated/yo/there/") + self.assertBody("a:yo,b:there") + + self.getPage("/decorated/yo/there/delete") + self.assertBody("deleting yo and there") + + self.getPage("/decorated/yo/there/handled_by_dispatch/") + self.assertBody("custom") + + self.getPage("/undecorated/blah/") + self.assertBody("index: blah") + + self.getPage("/index_only/a/b/c/d/e/f/g/") + self.assertBody("IndexOnly index") + + self.getPage("/parameter_test/argument2/") + self.assertBody("argument2") diff --git a/deps/cherrypy/test/test_encoding.py b/deps/cherrypy/test/test_encoding.py new file mode 100644 index 00000000..633ac7e1 --- /dev/null +++ b/deps/cherrypy/test/test_encoding.py @@ -0,0 +1,415 @@ +# coding: utf-8 + +import gzip +import io + +import six + +import mock + +import cherrypy +from cherrypy._cpcompat import IncompleteRead, ntob, ntou + +europoundUnicode = ntou('£', encoding='utf-8') +sing = ntou("毛泽东: Sing, Little Birdie?", encoding='utf-8') + +sing8 = sing.encode('utf-8') +sing16 = sing.encode('utf-16') + + +from cherrypy.test import helper + + +class EncodingTests(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def index(self, param): + assert param == europoundUnicode, "%r != %r" % ( + param, europoundUnicode) + yield 
europoundUnicode + + @cherrypy.expose + def mao_zedong(self): + return sing + + @cherrypy.expose + @cherrypy.config(**{'tools.encode.encoding': 'utf-8'}) + def utf8(self): + return sing8 + + @cherrypy.expose + def cookies_and_headers(self): + # if the headers have non-ascii characters and a cookie has + # any part which is unicode (even ascii), the response + # should not fail. + cherrypy.response.cookie['candy'] = 'bar' + cherrypy.response.cookie['candy']['domain'] = 'cherrypy.org' + cherrypy.response.headers[ + 'Some-Header'] = 'My d\xc3\xb6g has fleas' + return 'Any content' + + @cherrypy.expose + def reqparams(self, *args, **kwargs): + return ntob(', ').join( + [": ".join((k, v)).encode('utf8') + for k, v in sorted(cherrypy.request.params.items())] + ) + + @cherrypy.expose + @cherrypy.config(**{ + 'tools.encode.text_only': False, + 'tools.encode.add_charset': True, + }) + def nontext(self, *args, **kwargs): + cherrypy.response.headers[ + 'Content-Type'] = 'application/binary' + return '\x00\x01\x02\x03' + + class GZIP: + + @cherrypy.expose + def index(self): + yield "Hello, world" + + @cherrypy.expose + # Turn encoding off so the gzip tool is the one doing the collapse. + @cherrypy.config(**{'tools.encode.on': False}) + def noshow(self): + # Test for ticket #147, where yield showed no exceptions + # (content-encoding was still gzip even though traceback + # wasn't zipped). + raise IndexError() + yield "Here be dragons" + + @cherrypy.expose + @cherrypy.config(**{'response.stream': True}) + def noshow_stream(self): + # Test for ticket #147, where yield showed no exceptions + # (content-encoding was still gzip even though traceback + # wasn't zipped). 
+ raise IndexError() + yield "Here be dragons" + + class Decode: + + @cherrypy.expose + @cherrypy.config(**{ + 'tools.decode.on': True, + 'tools.decode.default_encoding': ['utf-16'], + }) + def extra_charset(self, *args, **kwargs): + return ', '.join([": ".join((k, v)) + for k, v in cherrypy.request.params.items()]) + + @cherrypy.expose + @cherrypy.config(**{ + 'tools.decode.on': True, + 'tools.decode.encoding': 'utf-16', + }) + def force_charset(self, *args, **kwargs): + return ', '.join([": ".join((k, v)) + for k, v in cherrypy.request.params.items()]) + + root = Root() + root.gzip = GZIP() + root.decode = Decode() + cherrypy.tree.mount(root, config={'/gzip': {'tools.gzip.on': True}}) + + def test_query_string_decoding(self): + if six.PY3: + # This test fails on Python 3. See #1443 + return + europoundUtf8 = europoundUnicode.encode('utf-8') + self.getPage(ntob('/?param=') + europoundUtf8) + self.assertBody(europoundUtf8) + + # Encoded utf8 query strings MUST be parsed correctly. + # Here, q is the POUND SIGN U+00A3 encoded in utf8 and then %HEX + self.getPage("/reqparams?q=%C2%A3") + # The return value will be encoded as utf8. + self.assertBody(ntob("q: \xc2\xa3")) + + # Query strings that are incorrectly encoded MUST raise 404. + # Here, q is the POUND SIGN U+00A3 encoded in latin1 and then %HEX + self.getPage("/reqparams?q=%A3") + self.assertStatus(404) + self.assertErrorPage( + 404, + "The given query string could not be processed. Query " + "strings for this resource must be encoded with 'utf8'.") + + def test_urlencoded_decoding(self): + # Test the decoding of an application/x-www-form-urlencoded entity. + europoundUtf8 = europoundUnicode.encode('utf-8') + body = ntob("param=") + europoundUtf8 + self.getPage('/', + method='POST', + headers=[ + ("Content-Type", "application/x-www-form-urlencoded"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertBody(europoundUtf8) + + # Encoded utf8 entities MUST be parsed and decoded correctly. 
+ # Here, q is the POUND SIGN U+00A3 encoded in utf8 + body = ntob("q=\xc2\xa3") + self.getPage('/reqparams', method='POST', + headers=[( + "Content-Type", "application/x-www-form-urlencoded"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertBody(ntob("q: \xc2\xa3")) + + # ...and in utf16, which is not in the default attempt_charsets list: + body = ntob("\xff\xfeq\x00=\xff\xfe\xa3\x00") + self.getPage('/reqparams', + method='POST', + headers=[ + ("Content-Type", + "application/x-www-form-urlencoded;charset=utf-16"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertBody(ntob("q: \xc2\xa3")) + + # Entities that are incorrectly encoded MUST raise 400. + # Here, q is the POUND SIGN U+00A3 encoded in utf16, but + # the Content-Type incorrectly labels it utf-8. + body = ntob("\xff\xfeq\x00=\xff\xfe\xa3\x00") + self.getPage('/reqparams', + method='POST', + headers=[ + ("Content-Type", + "application/x-www-form-urlencoded;charset=utf-8"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertStatus(400) + self.assertErrorPage( + 400, + "The request entity could not be decoded. The following charsets " + "were attempted: ['utf-8']") + + def test_decode_tool(self): + # An extra charset should be tried first, and succeed if it matches. + # Here, we add utf-16 as a charset and pass a utf-16 body. + body = ntob("\xff\xfeq\x00=\xff\xfe\xa3\x00") + self.getPage('/decode/extra_charset', method='POST', + headers=[( + "Content-Type", "application/x-www-form-urlencoded"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertBody(ntob("q: \xc2\xa3")) + + # An extra charset should be tried first, and continue to other default + # charsets if it doesn't match. + # Here, we add utf-16 as a charset but still pass a utf-8 body. 
+ body = ntob("q=\xc2\xa3") + self.getPage('/decode/extra_charset', method='POST', + headers=[( + "Content-Type", "application/x-www-form-urlencoded"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertBody(ntob("q: \xc2\xa3")) + + # An extra charset should error if force is True and it doesn't match. + # Here, we force utf-16 as a charset but still pass a utf-8 body. + body = ntob("q=\xc2\xa3") + self.getPage('/decode/force_charset', method='POST', + headers=[( + "Content-Type", "application/x-www-form-urlencoded"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertErrorPage( + 400, + "The request entity could not be decoded. The following charsets " + "were attempted: ['utf-16']") + + def test_multipart_decoding(self): + # Test the decoding of a multipart entity when the charset (utf16) is + # explicitly given. + body = ntob('\r\n'.join([ + '--X', + 'Content-Type: text/plain;charset=utf-16', + 'Content-Disposition: form-data; name="text"', + '', + '\xff\xfea\x00b\x00\x1c c\x00', + '--X', + 'Content-Type: text/plain;charset=utf-16', + 'Content-Disposition: form-data; name="submit"', + '', + '\xff\xfeC\x00r\x00e\x00a\x00t\x00e\x00', + '--X--' + ])) + self.getPage('/reqparams', method='POST', + headers=[( + "Content-Type", "multipart/form-data;boundary=X"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertBody(ntob("submit: Create, text: ab\xe2\x80\x9cc")) + + @mock.patch('cherrypy._cpreqbody.Part.maxrambytes', 1) + def test_multipart_decoding_bigger_maxrambytes(self): + """ + Decoding of a multipart entity should also pass when + the entity is bigger than maxrambytes. See ticket #1352. + """ + self.test_multipart_decoding() + + def test_multipart_decoding_no_charset(self): + # Test the decoding of a multipart entity when the charset (utf8) is + # NOT explicitly given, but is in the list of charsets to attempt. 
+ body = ntob('\r\n'.join([ + '--X', + 'Content-Disposition: form-data; name="text"', + '', + '\xe2\x80\x9c', + '--X', + 'Content-Disposition: form-data; name="submit"', + '', + 'Create', + '--X--' + ])) + self.getPage('/reqparams', method='POST', + headers=[( + "Content-Type", "multipart/form-data;boundary=X"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertBody(ntob("submit: Create, text: \xe2\x80\x9c")) + + def test_multipart_decoding_no_successful_charset(self): + # Test the decoding of a multipart entity when the charset (utf16) is + # NOT explicitly given, and is NOT in the list of charsets to attempt. + body = ntob('\r\n'.join([ + '--X', + 'Content-Disposition: form-data; name="text"', + '', + '\xff\xfea\x00b\x00\x1c c\x00', + '--X', + 'Content-Disposition: form-data; name="submit"', + '', + '\xff\xfeC\x00r\x00e\x00a\x00t\x00e\x00', + '--X--' + ])) + self.getPage('/reqparams', method='POST', + headers=[( + "Content-Type", "multipart/form-data;boundary=X"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertStatus(400) + self.assertErrorPage( + 400, + "The request entity could not be decoded. The following charsets " + "were attempted: ['us-ascii', 'utf-8']") + + def test_nontext(self): + self.getPage('/nontext') + self.assertHeader('Content-Type', 'application/binary;charset=utf-8') + self.assertBody('\x00\x01\x02\x03') + + def testEncoding(self): + # Default encoding should be utf-8 + self.getPage('/mao_zedong') + self.assertBody(sing8) + + # Ask for utf-16. + self.getPage('/mao_zedong', [('Accept-Charset', 'utf-16')]) + self.assertHeader('Content-Type', 'text/html;charset=utf-16') + self.assertBody(sing16) + + # Ask for multiple encodings. ISO-8859-1 should fail, and utf-16 + # should be produced. 
+ self.getPage('/mao_zedong', [('Accept-Charset', + 'iso-8859-1;q=1, utf-16;q=0.5')]) + self.assertBody(sing16) + + # The "*" value should default to our default_encoding, utf-8 + self.getPage('/mao_zedong', [('Accept-Charset', '*;q=1, utf-7;q=.2')]) + self.assertBody(sing8) + + # Only allow iso-8859-1, which should fail and raise 406. + self.getPage('/mao_zedong', [('Accept-Charset', 'iso-8859-1, *;q=0')]) + self.assertStatus("406 Not Acceptable") + self.assertInBody("Your client sent this Accept-Charset header: " + "iso-8859-1, *;q=0. We tried these charsets: " + "iso-8859-1.") + + # Ask for x-mac-ce, which should be unknown. See ticket #569. + self.getPage('/mao_zedong', [('Accept-Charset', + 'us-ascii, ISO-8859-1, x-mac-ce')]) + self.assertStatus("406 Not Acceptable") + self.assertInBody("Your client sent this Accept-Charset header: " + "us-ascii, ISO-8859-1, x-mac-ce. We tried these " + "charsets: ISO-8859-1, us-ascii, x-mac-ce.") + + # Test the 'encoding' arg to encode. + self.getPage('/utf8') + self.assertBody(sing8) + self.getPage('/utf8', [('Accept-Charset', 'us-ascii, ISO-8859-1')]) + self.assertStatus("406 Not Acceptable") + + def testGzip(self): + zbuf = io.BytesIO() + zfile = gzip.GzipFile(mode='wb', fileobj=zbuf, compresslevel=9) + zfile.write(ntob("Hello, world")) + zfile.close() + + self.getPage('/gzip/', headers=[("Accept-Encoding", "gzip")]) + self.assertInBody(zbuf.getvalue()[:3]) + self.assertHeader("Vary", "Accept-Encoding") + self.assertHeader("Content-Encoding", "gzip") + + # Test when gzip is denied. 
+ self.getPage('/gzip/', headers=[("Accept-Encoding", "identity")]) + self.assertHeader("Vary", "Accept-Encoding") + self.assertNoHeader("Content-Encoding") + self.assertBody("Hello, world") + + self.getPage('/gzip/', headers=[("Accept-Encoding", "gzip;q=0")]) + self.assertHeader("Vary", "Accept-Encoding") + self.assertNoHeader("Content-Encoding") + self.assertBody("Hello, world") + + self.getPage('/gzip/', headers=[("Accept-Encoding", "*;q=0")]) + self.assertStatus(406) + self.assertNoHeader("Content-Encoding") + self.assertErrorPage(406, "identity, gzip") + + # Test for ticket #147 + self.getPage('/gzip/noshow', headers=[("Accept-Encoding", "gzip")]) + self.assertNoHeader('Content-Encoding') + self.assertStatus(500) + self.assertErrorPage(500, pattern="IndexError\n") + + # In this case, there's nothing we can do to deliver a + # readable page, since 1) the gzip header is already set, + # and 2) we may have already written some of the body. + # The fix is to never stream yields when using gzip. + if (cherrypy.server.protocol_version == "HTTP/1.0" or + getattr(cherrypy.server, "using_apache", False)): + self.getPage('/gzip/noshow_stream', + headers=[("Accept-Encoding", "gzip")]) + self.assertHeader('Content-Encoding', 'gzip') + self.assertInBody('\x1f\x8b\x08\x00') + else: + # The wsgiserver will simply stop sending data, and the HTTP client + # will error due to an incomplete chunk-encoded stream. 
+ self.assertRaises((ValueError, IncompleteRead), self.getPage, + '/gzip/noshow_stream', + headers=[("Accept-Encoding", "gzip")]) + + def test_UnicodeHeaders(self): + self.getPage('/cookies_and_headers') + self.assertBody('Any content') diff --git a/deps/cherrypy/test/test_etags.py b/deps/cherrypy/test/test_etags.py new file mode 100644 index 00000000..0ac6ef84 --- /dev/null +++ b/deps/cherrypy/test/test_etags.py @@ -0,0 +1,84 @@ +import cherrypy +from cherrypy._cpcompat import ntou +from cherrypy.test import helper + + +class ETagTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def resource(self): + return "Oh wah ta goo Siam." + + @cherrypy.expose + def fail(self, code): + code = int(code) + if 300 <= code <= 399: + raise cherrypy.HTTPRedirect([], code) + else: + raise cherrypy.HTTPError(code) + + @cherrypy.expose + # In Python 3, tools.encode is on by default + @cherrypy.config(**{'tools.encode.on': True}) + def unicoded(self): + return ntou('I am a \u1ee4nicode string.', 'escape') + + conf = {'/': {'tools.etags.on': True, + 'tools.etags.autotags': True, + }} + cherrypy.tree.mount(Root(), config=conf) + + def test_etags(self): + self.getPage("/resource") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html;charset=utf-8') + self.assertBody('Oh wah ta goo Siam.') + etag = self.assertHeader('ETag') + + # Test If-Match (both valid and invalid) + self.getPage("/resource", headers=[('If-Match', etag)]) + self.assertStatus("200 OK") + self.getPage("/resource", headers=[('If-Match', "*")]) + self.assertStatus("200 OK") + self.getPage("/resource", headers=[('If-Match', "*")], method="POST") + self.assertStatus("200 OK") + self.getPage("/resource", headers=[('If-Match', "a bogus tag")]) + self.assertStatus("412 Precondition Failed") + + # Test If-None-Match (both valid and invalid) + self.getPage("/resource", headers=[('If-None-Match', etag)]) + self.assertStatus(304) + self.getPage("/resource", 
method='POST', + headers=[('If-None-Match', etag)]) + self.assertStatus("412 Precondition Failed") + self.getPage("/resource", headers=[('If-None-Match', "*")]) + self.assertStatus(304) + self.getPage("/resource", headers=[('If-None-Match', "a bogus tag")]) + self.assertStatus("200 OK") + + def test_errors(self): + self.getPage("/resource") + self.assertStatus(200) + etag = self.assertHeader('ETag') + + # Test raising errors in page handler + self.getPage("/fail/412", headers=[('If-Match', etag)]) + self.assertStatus(412) + self.getPage("/fail/304", headers=[('If-Match', etag)]) + self.assertStatus(304) + self.getPage("/fail/412", headers=[('If-None-Match', "*")]) + self.assertStatus(412) + self.getPage("/fail/304", headers=[('If-None-Match', "*")]) + self.assertStatus(304) + + def test_unicode_body(self): + self.getPage("/unicoded") + self.assertStatus(200) + etag1 = self.assertHeader('ETag') + self.getPage("/unicoded", headers=[('If-Match', etag1)]) + self.assertStatus(200) + self.assertHeader('ETag', etag1) diff --git a/deps/cherrypy/test/test_http.py b/deps/cherrypy/test/test_http.py new file mode 100644 index 00000000..48b60811 --- /dev/null +++ b/deps/cherrypy/test/test_http.py @@ -0,0 +1,262 @@ +"""Tests for managing HTTP issues (malformed requests, etc).""" + +import errno +import mimetypes +import socket +import sys + +import six + +from mock import patch + +import cherrypy +from cherrypy._cpcompat import HTTPConnection, HTTPSConnection, ntob + + +def encode_multipart_formdata(files): + """Return (content_type, body) ready for httplib.HTTP instance. + + files: a sequence of (name, filename, value) tuples for multipart uploads. 
+ """ + BOUNDARY = '________ThIs_Is_tHe_bouNdaRY_$' + L = [] + for key, filename, value in files: + L.append('--' + BOUNDARY) + L.append('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)) + ct = mimetypes.guess_type(filename)[0] or 'application/octet-stream' + L.append('Content-Type: %s' % ct) + L.append('') + L.append(value) + L.append('--' + BOUNDARY + '--') + L.append('') + body = '\r\n'.join(L) + content_type = 'multipart/form-data; boundary=%s' % BOUNDARY + return content_type, body + + +from cherrypy.test import helper + + +class HTTPTests(helper.CPWebCase): + + def make_connection(self): + if self.scheme == "https": + return HTTPSConnection('%s:%s' % (self.interface(), self.PORT)) + else: + return HTTPConnection('%s:%s' % (self.interface(), self.PORT)) + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def index(self, *args, **kwargs): + return "Hello world!" + + @cherrypy.expose + @cherrypy.config(**{'request.process_request_body': False}) + def no_body(self, *args, **kwargs): + return "Hello world!" + + @cherrypy.expose + def post_multipart(self, file): + """Return a summary ("a * 65536\nb * 65536") of the uploaded + file. 
+ """ + contents = file.file.read() + summary = [] + curchar = None + count = 0 + for c in contents: + if c == curchar: + count += 1 + else: + if count: + if six.PY3: + curchar = chr(curchar) + summary.append("%s * %d" % (curchar, count)) + count = 1 + curchar = c + if count: + if six.PY3: + curchar = chr(curchar) + summary.append("%s * %d" % (curchar, count)) + return ", ".join(summary) + + @cherrypy.expose + def post_filename(self, myfile): + '''Return the name of the file which was uploaded.''' + return myfile.filename + + cherrypy.tree.mount(Root()) + cherrypy.config.update({'server.max_request_body_size': 30000000}) + + def test_no_content_length(self): + # "The presence of a message-body in a request is signaled by the + # inclusion of a Content-Length or Transfer-Encoding header field in + # the request's message-headers." + # + # Send a message with neither header and no body. Even though + # the request is of method POST, this should be OK because we set + # request.process_request_body to False for our handler. + c = self.make_connection() + c.request("POST", "/no_body") + response = c.getresponse() + self.body = response.fp.read() + self.status = str(response.status) + self.assertStatus(200) + self.assertBody(ntob('Hello world!')) + + # Now send a message that has no Content-Length, but does send a body. + # Verify that CP times out the socket and responds + # with 411 Length Required. 
+ if self.scheme == "https": + c = HTTPSConnection('%s:%s' % (self.interface(), self.PORT)) + else: + c = HTTPConnection('%s:%s' % (self.interface(), self.PORT)) + + # `_get_content_length` is needed for Python 3.6+ + with patch.object(c, '_get_content_length', lambda body, method: None, create=True): + # `_set_content_length` is needed for Python 2.7-3.5 + with patch.object(c, '_set_content_length', create=True): + c.request("POST", "/") + + response = c.getresponse() + self.body = response.fp.read() + self.status = str(response.status) + self.assertStatus(411) + + def test_post_multipart(self): + alphabet = "abcdefghijklmnopqrstuvwxyz" + # generate file contents for a large post + contents = "".join([c * 65536 for c in alphabet]) + + # encode as multipart form data + files = [('file', 'file.txt', contents)] + content_type, body = encode_multipart_formdata(files) + body = body.encode('Latin-1') + + # post file + c = self.make_connection() + c.putrequest('POST', '/post_multipart') + c.putheader('Content-Type', content_type) + c.putheader('Content-Length', str(len(body))) + c.endheaders() + c.send(body) + + response = c.getresponse() + self.body = response.fp.read() + self.status = str(response.status) + self.assertStatus(200) + self.assertBody(", ".join(["%s * 65536" % c for c in alphabet])) + + def test_post_filename_with_special_characters(self): + '''Testing that we can handle filenames with special characters. This + was reported as a bug in: + https://github.com/cherrypy/cherrypy/issues/1146/ + https://github.com/cherrypy/cherrypy/issues/1397''' + # We'll upload a bunch of files with differing names. 
+ fnames = ['boop.csv', 'foo, bar.csv', 'bar, xxxx.csv', 'file"name.csv', + 'file;name.csv', 'file; name.csv'] + for fname in fnames: + files = [('myfile', fname, 'yunyeenyunyue')] + content_type, body = encode_multipart_formdata(files) + body = body.encode('Latin-1') + + # post file + c = self.make_connection() + c.putrequest('POST', '/post_filename') + c.putheader('Content-Type', content_type) + c.putheader('Content-Length', str(len(body))) + c.endheaders() + c.send(body) + + response = c.getresponse() + self.body = response.fp.read() + self.status = str(response.status) + self.assertStatus(200) + self.assertBody(fname) + + def test_malformed_request_line(self): + if getattr(cherrypy.server, "using_apache", False): + return self.skip("skipped due to known Apache differences...") + + # Test missing version in Request-Line + c = self.make_connection() + c._output(ntob('GET /')) + c._send_output() + if hasattr(c, 'strict'): + response = c.response_class(c.sock, strict=c.strict, method='GET') + else: + # Python 3.2 removed the 'strict' feature, saying: + # "http.client now always assumes HTTP/1.x compliant servers." 
+ response = c.response_class(c.sock, method='GET') + response.begin() + self.assertEqual(response.status, 400) + self.assertEqual(response.fp.read(22), ntob("Malformed Request-Line")) + c.close() + + def test_request_line_split_issue_1220(self): + Request_URI = ( + '/index?intervenant-entreprise-evenement_classaction=evenement-mailremerciements' + '&_path=intervenant-entreprise-evenement&intervenant-entreprise-evenement_action-id=19404' + '&intervenant-entreprise-evenement_id=19404&intervenant-entreprise_id=28092' + ) + self.assertEqual(len("GET %s HTTP/1.1\r\n" % Request_URI), 256) + self.getPage(Request_URI) + self.assertBody("Hello world!") + + def test_malformed_header(self): + c = self.make_connection() + c.putrequest('GET', '/') + c.putheader('Content-Type', 'text/plain') + # See https://github.com/cherrypy/cherrypy/issues/941 + c._output(ntob('Re, 1.2.3.4#015#012')) + c.endheaders() + + response = c.getresponse() + self.status = str(response.status) + self.assertStatus(400) + self.body = response.fp.read(20) + self.assertBody("Illegal header line.") + + def test_http_over_https(self): + if self.scheme != 'https': + return self.skip("skipped (not running HTTPS)... ") + + # Try connecting without SSL. + conn = HTTPConnection('%s:%s' % (self.interface(), self.PORT)) + conn.putrequest("GET", "/", skip_host=True) + conn.putheader("Host", self.HOST) + conn.endheaders() + response = conn.response_class(conn.sock, method="GET") + try: + response.begin() + self.assertEqual(response.status, 400) + self.body = response.read() + self.assertBody("The client sent a plain HTTP request, but this " + "server only speaks HTTPS on this port.") + except socket.error: + e = sys.exc_info()[1] + # "Connection reset by peer" is also acceptable. 
+ if e.errno != errno.ECONNRESET: + raise + + def test_garbage_in(self): + # Connect without SSL regardless of server.scheme + c = HTTPConnection('%s:%s' % (self.interface(), self.PORT)) + c._output(ntob('gjkgjklsgjklsgjkljklsg')) + c._send_output() + response = c.response_class(c.sock, method="GET") + try: + response.begin() + self.assertEqual(response.status, 400) + self.assertEqual(response.fp.read(22), + ntob("Malformed Request-Line")) + c.close() + except socket.error: + e = sys.exc_info()[1] + # "Connection reset by peer" is also acceptable. + if e.errno != errno.ECONNRESET: + raise diff --git a/deps/cherrypy/test/test_httpauth.py b/deps/cherrypy/test/test_httpauth.py new file mode 100644 index 00000000..db2b9445 --- /dev/null +++ b/deps/cherrypy/test/test_httpauth.py @@ -0,0 +1,195 @@ +from hashlib import md5, sha1 + +import cherrypy +from cherrypy._cpcompat import ntob +from cherrypy.lib import httpauth + +from cherrypy.test import helper + + +class HTTPAuthTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def index(self): + return "This is public." + + class DigestProtected: + + @cherrypy.expose + def index(self): + return "Hello %s, you've been authorized." % ( + cherrypy.request.login) + + class BasicProtected: + + @cherrypy.expose + def index(self): + return "Hello %s, you've been authorized." % ( + cherrypy.request.login) + + class BasicProtected2: + + @cherrypy.expose + def index(self): + return "Hello %s, you've been authorized." 
% ( + cherrypy.request.login) + + def fetch_users(): + return {'test': 'test'} + + def sha_password_encrypter(password): + return sha1(ntob(password)).hexdigest() + + def fetch_password(username): + return sha1(ntob('test')).hexdigest() + + conf = { + '/digest': { + 'tools.digest_auth.on': True, + 'tools.digest_auth.realm': 'localhost', + 'tools.digest_auth.users': fetch_users + }, + '/basic': { + 'tools.basic_auth.on': True, + 'tools.basic_auth.realm': 'localhost', + 'tools.basic_auth.users': { + 'test': md5(ntob('test')).hexdigest() + } + }, + '/basic2': { + 'tools.basic_auth.on': True, + 'tools.basic_auth.realm': 'localhost', + 'tools.basic_auth.users': fetch_password, + 'tools.basic_auth.encrypt': sha_password_encrypter + } + } + + root = Root() + root.digest = DigestProtected() + root.basic = BasicProtected() + root.basic2 = BasicProtected2() + cherrypy.tree.mount(root, config=conf) + + def testPublic(self): + self.getPage("/") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html;charset=utf-8') + self.assertBody('This is public.') + + def testBasic(self): + self.getPage("/basic/") + self.assertStatus(401) + self.assertHeader('WWW-Authenticate', 'Basic realm="localhost"') + + self.getPage('/basic/', [('Authorization', 'Basic dGVzdDp0ZX60')]) + self.assertStatus(401) + + self.getPage('/basic/', [('Authorization', 'Basic dGVzdDp0ZXN0')]) + self.assertStatus('200 OK') + self.assertBody("Hello test, you've been authorized.") + + def testBasic2(self): + self.getPage("/basic2/") + self.assertStatus(401) + self.assertHeader('WWW-Authenticate', 'Basic realm="localhost"') + + self.getPage('/basic2/', [('Authorization', 'Basic dGVzdDp0ZX60')]) + self.assertStatus(401) + + self.getPage('/basic2/', [('Authorization', 'Basic dGVzdDp0ZXN0')]) + self.assertStatus('200 OK') + self.assertBody("Hello test, you've been authorized.") + + def testDigest(self): + self.getPage("/digest/") + self.assertStatus(401) + + value = None + for k, v in self.headers: + 
if k.lower() == "www-authenticate": + if v.startswith("Digest"): + value = v + break + + if value is None: + self._handlewebError( + "Digest authentification scheme was not found") + + value = value[7:] + items = value.split(', ') + tokens = {} + for item in items: + key, value = item.split('=') + tokens[key.lower()] = value + + missing_msg = "%s is missing" + bad_value_msg = "'%s' was expecting '%s' but found '%s'" + nonce = None + if 'realm' not in tokens: + self._handlewebError(missing_msg % 'realm') + elif tokens['realm'] != '"localhost"': + self._handlewebError(bad_value_msg % + ('realm', '"localhost"', tokens['realm'])) + if 'nonce' not in tokens: + self._handlewebError(missing_msg % 'nonce') + else: + nonce = tokens['nonce'].strip('"') + if 'algorithm' not in tokens: + self._handlewebError(missing_msg % 'algorithm') + elif tokens['algorithm'] != '"MD5"': + self._handlewebError(bad_value_msg % + ('algorithm', '"MD5"', tokens['algorithm'])) + if 'qop' not in tokens: + self._handlewebError(missing_msg % 'qop') + elif tokens['qop'] != '"auth"': + self._handlewebError(bad_value_msg % + ('qop', '"auth"', tokens['qop'])) + + # Test a wrong 'realm' value + base_auth = ( + 'Digest ' + 'username="test", ' + 'realm="wrong realm", ' + 'nonce="%s", ' + 'uri="/digest/", ' + 'algorithm=MD5, ' + 'response="%s", ' + 'qop=auth, ' + 'nc=%s, ' + 'cnonce="1522e61005789929"' + ) + + auth = base_auth % (nonce, '', '00000001') + params = httpauth.parseAuthorization(auth) + response = httpauth._computeDigestResponse(params, 'test') + + auth = base_auth % (nonce, response, '00000001') + self.getPage('/digest/', [('Authorization', auth)]) + self.assertStatus(401) + + # Test that must pass + base_auth = ( + 'Digest ' + 'username="test", ' + 'realm="localhost", ' + 'nonce="%s", ' + 'uri="/digest/", ' + 'algorithm=MD5, ' + 'response="%s", ' + 'qop=auth, ' + 'nc=%s, ' + 'cnonce="1522e61005789929"' + ) + + auth = base_auth % (nonce, '', '00000001') + params = 
httpauth.parseAuthorization(auth) + response = httpauth._computeDigestResponse(params, 'test') + + auth = base_auth % (nonce, response, '00000001') + self.getPage('/digest/', [('Authorization', auth)]) + self.assertStatus('200 OK') + self.assertBody("Hello test, you've been authorized.") diff --git a/deps/cherrypy/test/test_httplib.py b/deps/cherrypy/test/test_httplib.py new file mode 100644 index 00000000..76ca7641 --- /dev/null +++ b/deps/cherrypy/test/test_httplib.py @@ -0,0 +1,29 @@ +"""Tests for cherrypy/lib/httputil.py.""" + +import unittest +from cherrypy.lib import httputil + + +class UtilityTests(unittest.TestCase): + + def test_urljoin(self): + # Test all slash+atom combinations for SCRIPT_NAME and PATH_INFO + self.assertEqual(httputil.urljoin("/sn/", "/pi/"), "/sn/pi/") + self.assertEqual(httputil.urljoin("/sn/", "/pi"), "/sn/pi") + self.assertEqual(httputil.urljoin("/sn/", "/"), "/sn/") + self.assertEqual(httputil.urljoin("/sn/", ""), "/sn/") + self.assertEqual(httputil.urljoin("/sn", "/pi/"), "/sn/pi/") + self.assertEqual(httputil.urljoin("/sn", "/pi"), "/sn/pi") + self.assertEqual(httputil.urljoin("/sn", "/"), "/sn/") + self.assertEqual(httputil.urljoin("/sn", ""), "/sn") + self.assertEqual(httputil.urljoin("/", "/pi/"), "/pi/") + self.assertEqual(httputil.urljoin("/", "/pi"), "/pi") + self.assertEqual(httputil.urljoin("/", "/"), "/") + self.assertEqual(httputil.urljoin("/", ""), "/") + self.assertEqual(httputil.urljoin("", "/pi/"), "/pi/") + self.assertEqual(httputil.urljoin("", "/pi"), "/pi") + self.assertEqual(httputil.urljoin("", "/"), "/") + self.assertEqual(httputil.urljoin("", ""), "/") + +if __name__ == '__main__': + unittest.main() diff --git a/deps/cherrypy/test/test_iterator.py b/deps/cherrypy/test/test_iterator.py new file mode 100644 index 00000000..28d5049e --- /dev/null +++ b/deps/cherrypy/test/test_iterator.py @@ -0,0 +1,189 @@ +import six + +import cherrypy + + +class IteratorBase(object): + + created = 0 + datachunk = 'butternut 
squash' * 256 + + @classmethod + def incr(cls): + cls.created += 1 + + @classmethod + def decr(cls): + cls.created -= 1 + +class OurGenerator(IteratorBase): + + def __iter__(self): + self.incr() + try: + for i in range(1024): + yield self.datachunk + finally: + self.decr() + +class OurIterator(IteratorBase): + + started = False + closed_off = False + count = 0 + + def increment(self): + self.incr() + + def decrement(self): + if not self.closed_off: + self.closed_off = True + self.decr() + + def __iter__(self): + return self + + def __next__(self): + if not self.started: + self.started = True + self.increment() + self.count += 1 + if self.count > 1024: + raise StopIteration + return self.datachunk + + next = __next__ + + def __del__(self): + self.decrement() + +class OurClosableIterator(OurIterator): + + def close(self): + self.decrement() + +class OurNotClosableIterator(OurIterator): + + # We can't close something which requires an additional argument. + def close(self, somearg): + self.decrement() + +class OurUnclosableIterator(OurIterator): + close = 'close' # not callable! + +from cherrypy.test import helper +class IteratorTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + + class Root(object): + + @cherrypy.expose + def count(self, clsname): + cherrypy.response.headers['Content-Type'] = 'text/plain' + return six.text_type(globals()[clsname].created) + + @cherrypy.expose + def getall(self, clsname): + cherrypy.response.headers['Content-Type'] = 'text/plain' + return globals()[clsname]() + + @cherrypy.expose + @cherrypy.config(**{'response.stream': True}) + def stream(self, clsname): + return self.getall(clsname) + + cherrypy.tree.mount(Root()) + + def test_iterator(self): + try: + self._test_iterator() + except Exception: + "Test fails intermittently. 
See #1419" + + def _test_iterator(self): + if cherrypy.server.protocol_version != "HTTP/1.1": + return self.skip() + + self.PROTOCOL = "HTTP/1.1" + + # Check the counts of all the classes, they should be zero. + closables = ['OurClosableIterator', 'OurGenerator'] + unclosables = ['OurUnclosableIterator', 'OurNotClosableIterator'] + all_classes = closables + unclosables + + import random + random.shuffle(all_classes) + + for clsname in all_classes: + self.getPage("/count/" + clsname) + self.assertStatus(200) + self.assertBody('0') + + # We should also be able to read the entire content body + # successfully, though we don't need to, we just want to + # check the header. + for clsname in all_classes: + itr_conn = self.get_conn() + itr_conn.putrequest("GET", "/getall/" + clsname) + itr_conn.endheaders() + response = itr_conn.getresponse() + self.assertEqual(response.status, 200) + headers = response.getheaders() + for header_name, header_value in headers: + if header_name.lower() == 'content-length': + assert header_value == six.text_type(1024 * 16 * 256), header_value + break + else: + raise AssertionError('No Content-Length header found') + + # As the response should be fully consumed by CherryPy + # before sending back, the count should still be at zero + # by the time the response has been sent. + self.getPage("/count/" + clsname) + self.assertStatus(200) + self.assertBody('0') + + # Now we do the same check with streaming - some classes will + # be automatically closed, while others cannot. + stream_counts = {} + for clsname in all_classes: + itr_conn = self.get_conn() + itr_conn.putrequest("GET", "/stream/" + clsname) + itr_conn.endheaders() + response = itr_conn.getresponse() + self.assertEqual(response.status, 200) + response.fp.read(65536) + + # Let's check the count - this should always be one. + self.getPage("/count/" + clsname) + self.assertBody('1') + + # Now if we close the connection, the count should go back + # to zero. 
+ itr_conn.close() + self.getPage("/count/" + clsname) + + # If this is a response which should be easily closed, then + # we will test to see if the value has gone back down to + # zero. + if clsname in closables: + + # Sometimes we try to get the answer too quickly - we + # will wait for 100 ms before asking again if we didn't + # get the answer we wanted. + if self.body != '0': + import time + time.sleep(0.1) + self.getPage("/count/" + clsname) + + stream_counts[clsname] = int(self.body) + + # Check that we closed off the classes which should provide + # easy mechanisms for doing so. + for clsname in closables: + assert stream_counts[clsname] == 0, ( + 'did not close off stream response correctly, expected ' + 'count of zero for %s: %s' % (clsname, stream_counts) + ) diff --git a/deps/cherrypy/test/test_json.py b/deps/cherrypy/test/test_json.py new file mode 100644 index 00000000..7dffb429 --- /dev/null +++ b/deps/cherrypy/test/test_json.py @@ -0,0 +1,101 @@ +import cherrypy +from cherrypy.test import helper + +from cherrypy._cpcompat import json + + +json_out = cherrypy.config(**{'tools.json_out.on': True}) +json_in = cherrypy.config(**{'tools.json_in.on': True}) + +class JsonTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root(object): + + @cherrypy.expose + def plain(self): + return 'hello' + + @cherrypy.expose + @json_out + def json_string(self): + return 'hello' + + @cherrypy.expose + @json_out + def json_list(self): + return ['a', 'b', 42] + + @cherrypy.expose + @json_out + def json_dict(self): + return {'answer': 42} + + @cherrypy.expose + @json_in + def json_post(self): + if cherrypy.request.json == [13, 'c']: + return 'ok' + else: + return 'nok' + + @cherrypy.expose + @json_out + @cherrypy.config(**{'tools.caching.on': True}) + def json_cached(self): + return 'hello there' + + root = Root() + cherrypy.tree.mount(root) + + def test_json_output(self): + if json is None: + self.skip("json not found ") + return + + 
self.getPage("/plain") + self.assertBody("hello") + + self.getPage("/json_string") + self.assertBody('"hello"') + + self.getPage("/json_list") + self.assertBody('["a", "b", 42]') + + self.getPage("/json_dict") + self.assertBody('{"answer": 42}') + + def test_json_input(self): + if json is None: + self.skip("json not found ") + return + + body = '[13, "c"]' + headers = [('Content-Type', 'application/json'), + ('Content-Length', str(len(body)))] + self.getPage("/json_post", method="POST", headers=headers, body=body) + self.assertBody('ok') + + body = '[13, "c"]' + headers = [('Content-Type', 'text/plain'), + ('Content-Length', str(len(body)))] + self.getPage("/json_post", method="POST", headers=headers, body=body) + self.assertStatus(415, 'Expected an application/json content type') + + body = '[13, -]' + headers = [('Content-Type', 'application/json'), + ('Content-Length', str(len(body)))] + self.getPage("/json_post", method="POST", headers=headers, body=body) + self.assertStatus(400, 'Invalid JSON document') + + def test_cached(self): + if json is None: + self.skip("json not found ") + return + + self.getPage("/json_cached") + self.assertStatus(200, '"hello"') + + self.getPage("/json_cached") # 2'nd time to hit cache + self.assertStatus(200, '"hello"') diff --git a/deps/cherrypy/test/test_logging.py b/deps/cherrypy/test/test_logging.py new file mode 100644 index 00000000..ebaced89 --- /dev/null +++ b/deps/cherrypy/test/test_logging.py @@ -0,0 +1,179 @@ +"""Basic tests for the CherryPy core: request handling.""" + +import os +localDir = os.path.dirname(__file__) + +import six + +import cherrypy +from cherrypy._cpcompat import ntob, ntou + +access_log = os.path.join(localDir, "access.log") +error_log = os.path.join(localDir, "error.log") + +# Some unicode strings. 
+tartaros = ntou('\u03a4\u1f71\u03c1\u03c4\u03b1\u03c1\u03bf\u03c2', 'escape') +erebos = ntou('\u0388\u03c1\u03b5\u03b2\u03bf\u03c2.com', 'escape') + + +def setup_server(): + class Root: + + @cherrypy.expose + def index(self): + return "hello" + + @cherrypy.expose + def uni_code(self): + cherrypy.request.login = tartaros + cherrypy.request.remote.name = erebos + + @cherrypy.expose + def slashes(self): + cherrypy.request.request_line = r'GET /slashed\path HTTP/1.1' + + @cherrypy.expose + def whitespace(self): + # User-Agent = "User-Agent" ":" 1*( product | comment ) + # comment = "(" *( ctext | quoted-pair | comment ) ")" + # ctext = + # TEXT = + # LWS = [CRLF] 1*( SP | HT ) + cherrypy.request.headers['User-Agent'] = 'Browzuh (1.0\r\n\t\t.3)' + + @cherrypy.expose + def as_string(self): + return "content" + + @cherrypy.expose + def as_yield(self): + yield "content" + + @cherrypy.expose + @cherrypy.config(**{'tools.log_tracebacks.on': True}) + def error(self): + raise ValueError() + + root = Root() + + cherrypy.config.update({ + 'log.error_file': error_log, + 'log.access_file': access_log, + }) + cherrypy.tree.mount(root) + + +from cherrypy.test import helper, logtest + + +class AccessLogTests(helper.CPWebCase, logtest.LogCase): + setup_server = staticmethod(setup_server) + + logfile = access_log + + def testNormalReturn(self): + self.markLog() + self.getPage("/as_string", + headers=[('Referer', 'http://www.cherrypy.org/'), + ('User-Agent', 'Mozilla/5.0')]) + self.assertBody('content') + self.assertStatus(200) + + intro = '%s - - [' % self.interface() + + self.assertLog(-1, intro) + + if [k for k, v in self.headers if k.lower() == 'content-length']: + self.assertLog(-1, '] "GET %s/as_string HTTP/1.1" 200 7 ' + '"http://www.cherrypy.org/" "Mozilla/5.0"' + % self.prefix()) + else: + self.assertLog(-1, '] "GET %s/as_string HTTP/1.1" 200 - ' + '"http://www.cherrypy.org/" "Mozilla/5.0"' + % self.prefix()) + + def testNormalYield(self): + self.markLog() + 
self.getPage("/as_yield") + self.assertBody('content') + self.assertStatus(200) + + intro = '%s - - [' % self.interface() + + self.assertLog(-1, intro) + if [k for k, v in self.headers if k.lower() == 'content-length']: + self.assertLog(-1, '] "GET %s/as_yield HTTP/1.1" 200 7 "" ""' % + self.prefix()) + else: + self.assertLog(-1, '] "GET %s/as_yield HTTP/1.1" 200 - "" ""' + % self.prefix()) + + def testCustomLogFormat(self): + '''Test a customized access_log_format string, + which is a feature of _cplogging.LogManager.access() ''' + + original_logformat = cherrypy._cplogging.LogManager.access_log_format + cherrypy._cplogging.LogManager.access_log_format = \ + '{h} {l} {u} {t} "{r}" {s} {b} "{f}" "{a}" {o}' \ + if six.PY3 else \ + '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" %(o)s' + + self.markLog() + self.getPage("/as_string", headers=[('Referer', 'REFERER'), + ('User-Agent', 'USERAGENT'), + ('Host', 'HOST')]) + self.assertLog(-1, '%s - - [' % self.interface()) + self.assertLog(-1, '] "GET /as_string HTTP/1.1" ' + '200 7 "REFERER" "USERAGENT" HOST') + + cherrypy._cplogging.LogManager.access_log_format = original_logformat + + def testEscapedOutput(self): + # Test unicode in access log pieces. + self.markLog() + self.getPage("/uni_code") + self.assertStatus(200) + if six.PY3: + # The repr of a bytestring in six.PY3 includes a b'' prefix + self.assertLog(-1, repr(tartaros.encode('utf8'))[2:-1]) + else: + self.assertLog(-1, repr(tartaros.encode('utf8'))[1:-1]) + # Test the erebos value. Included inline for your enlightenment. + # Note the 'r' prefix--those backslashes are literals. + self.assertLog(-1, r'\xce\x88\xcf\x81\xce\xb5\xce\xb2\xce\xbf\xcf\x82') + + # Test backslashes in output. + self.markLog() + self.getPage("/slashes") + self.assertStatus(200) + if six.PY3: + self.assertLog(-1, ntob('"GET /slashed\\path HTTP/1.1"')) + else: + self.assertLog(-1, r'"GET /slashed\\path HTTP/1.1"') + + # Test whitespace in output. 
+ self.markLog() + self.getPage("/whitespace") + self.assertStatus(200) + # Again, note the 'r' prefix. + self.assertLog(-1, r'"Browzuh (1.0\r\n\t\t.3)"') + + +class ErrorLogTests(helper.CPWebCase, logtest.LogCase): + setup_server = staticmethod(setup_server) + + logfile = error_log + + def testTracebacks(self): + # Test that tracebacks get written to the error log. + self.markLog() + ignore = helper.webtest.ignored_exceptions + ignore.append(ValueError) + try: + self.getPage("/error") + self.assertInBody("raise ValueError()") + self.assertLog(0, 'HTTP') + self.assertLog(1, 'Traceback (most recent call last):') + self.assertLog(-2, 'raise ValueError()') + finally: + ignore.pop() diff --git a/deps/cherrypy/test/test_mime.py b/deps/cherrypy/test/test_mime.py new file mode 100644 index 00000000..7826fd56 --- /dev/null +++ b/deps/cherrypy/test/test_mime.py @@ -0,0 +1,135 @@ +"""Tests for various MIME issues, including the safe_multipart Tool.""" + +import cherrypy +from cherrypy._cpcompat import ntob, ntou, sorted + + +def setup_server(): + + class Root: + + @cherrypy.expose + def multipart(self, parts): + return repr(parts) + + @cherrypy.expose + def multipart_form_data(self, **kwargs): + return repr(list(sorted(kwargs.items()))) + + @cherrypy.expose + def flashupload(self, Filedata, Upload, Filename): + return ("Upload: %s, Filename: %s, Filedata: %r" % + (Upload, Filename, Filedata.file.read())) + + cherrypy.config.update({'server.max_request_body_size': 0}) + cherrypy.tree.mount(Root()) + + +# Client-side code # + +from cherrypy.test import helper + + +class MultipartTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def test_multipart(self): + text_part = ntou("This is the text version") + html_part = ntou( + """ + + + + + + +This is the HTML version + + +""") + body = '\r\n'.join([ + "--123456789", + "Content-Type: text/plain; charset='ISO-8859-1'", + "Content-Transfer-Encoding: 7bit", + "", + text_part, + "--123456789", + "Content-Type: 
text/html; charset='ISO-8859-1'", + "", + html_part, + "--123456789--"]) + headers = [ + ('Content-Type', 'multipart/mixed; boundary=123456789'), + ('Content-Length', str(len(body))), + ] + self.getPage('/multipart', headers, "POST", body) + self.assertBody(repr([text_part, html_part])) + + def test_multipart_form_data(self): + body = '\r\n'.join([ + '--X', + 'Content-Disposition: form-data; name="foo"', + '', + 'bar', + '--X', + # Test a param with more than one value. + # See + # https://github.com/cherrypy/cherrypy/issues/1028 + 'Content-Disposition: form-data; name="baz"', + '', + '111', + '--X', + 'Content-Disposition: form-data; name="baz"', + '', + '333', + '--X--' + ]) + self.getPage('/multipart_form_data', method='POST', + headers=[( + "Content-Type", "multipart/form-data;boundary=X"), + ("Content-Length", str(len(body))), + ], + body=body), + self.assertBody( + repr([('baz', [ntou('111'), ntou('333')]), ('foo', ntou('bar'))])) + + +class SafeMultipartHandlingTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def test_Flash_Upload(self): + headers = [ + ('Accept', 'text/*'), + ('Content-Type', 'multipart/form-data; ' + 'boundary=----------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6'), + ('User-Agent', 'Shockwave Flash'), + ('Host', 'www.example.com:54583'), + ('Content-Length', '499'), + ('Connection', 'Keep-Alive'), + ('Cache-Control', 'no-cache'), + ] + filedata = ntob('\r\n' + '\r\n' + '\r\n') + body = (ntob( + '------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n' + 'Content-Disposition: form-data; name="Filename"\r\n' + '\r\n' + '.project\r\n' + '------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n' + 'Content-Disposition: form-data; ' + 'name="Filedata"; filename=".project"\r\n' + 'Content-Type: application/octet-stream\r\n' + '\r\n') + + filedata + + ntob('\r\n' + '------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n' + 'Content-Disposition: form-data; name="Upload"\r\n' + '\r\n' + 'Submit Query\r\n' + # Flash apps omit the trailing \r\n on the last line: + 
'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6--' + )) + self.getPage('/flashupload', headers, "POST", body) + self.assertBody("Upload: Submit Query, Filename: .project, " + "Filedata: %r" % filedata) diff --git a/deps/cherrypy/test/test_misc_tools.py b/deps/cherrypy/test/test_misc_tools.py new file mode 100644 index 00000000..4e38cd1c --- /dev/null +++ b/deps/cherrypy/test/test_misc_tools.py @@ -0,0 +1,217 @@ +import os +localDir = os.path.dirname(__file__) +logfile = os.path.join(localDir, "test_misc_tools.log") + +import cherrypy +from cherrypy import tools + + +def setup_server(): + class Root: + + @cherrypy.expose + def index(self): + yield "Hello, world" + h = [("Content-Language", "en-GB"), ('Content-Type', 'text/plain')] + tools.response_headers(headers=h)(index) + + @cherrypy.expose + @cherrypy.config(**{ + 'tools.response_headers.on': True, + 'tools.response_headers.headers': [ + ("Content-Language", "fr"), + ('Content-Type', 'text/plain'), + ], + 'tools.log_hooks.on': True, + }) + def other(self): + return "salut" + + @cherrypy.config(**{'tools.accept.on': True}) + class Accept: + + @cherrypy.expose + def index(self): + return 'Atom feed' + + # In Python 2.4+, we could use a decorator instead: + # @tools.accept('application/atom+xml') + @cherrypy.expose + @cherrypy.config(**{'tools.accept.media': 'application/atom+xml'}) + def feed(self): + return """ + + Unknown Blog +""" + + @cherrypy.expose + def select(self): + # We could also write this: mtype = cherrypy.lib.accept.accept(...) + mtype = tools.accept.callable(['text/html', 'text/plain']) + if mtype == 'text/html': + return "

Page Title

" + else: + return "PAGE TITLE" + + class Referer: + + @cherrypy.expose + def accept(self): + return "Accepted!" + reject = accept + + class AutoVary: + + @cherrypy.expose + def index(self): + # Read a header directly with 'get' + ae = cherrypy.request.headers.get('Accept-Encoding') + # Read a header directly with '__getitem__' + cl = cherrypy.request.headers['Host'] + # Read a header directly with '__contains__' + hasif = 'If-Modified-Since' in cherrypy.request.headers + # Read a header directly with 'has_key' + if hasattr(dict, 'has_key'): + # Python 2 + has = cherrypy.request.headers.has_key('Range') + else: + # Python 3 + has = 'Range' in cherrypy.request.headers + # Call a lib function + mtype = tools.accept.callable(['text/html', 'text/plain']) + return "Hello, world!" + + conf = {'/referer': {'tools.referer.on': True, + 'tools.referer.pattern': r'http://[^/]*example\.com', + }, + '/referer/reject': {'tools.referer.accept': False, + 'tools.referer.accept_missing': True, + }, + '/autovary': {'tools.autovary.on': True}, + } + + root = Root() + root.referer = Referer() + root.accept = Accept() + root.autovary = AutoVary() + cherrypy.tree.mount(root, config=conf) + cherrypy.config.update({'log.error_file': logfile}) + + +from cherrypy.test import helper + + +class ResponseHeadersTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def testResponseHeadersDecorator(self): + self.getPage('/') + self.assertHeader("Content-Language", "en-GB") + self.assertHeader('Content-Type', 'text/plain;charset=utf-8') + + def testResponseHeaders(self): + self.getPage('/other') + self.assertHeader("Content-Language", "fr") + self.assertHeader('Content-Type', 'text/plain;charset=utf-8') + + +class RefererTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def testReferer(self): + self.getPage('/referer/accept') + self.assertErrorPage(403, 'Forbidden Referer header.') + + self.getPage('/referer/accept', + headers=[('Referer', 
'http://www.example.com/')]) + self.assertStatus(200) + self.assertBody('Accepted!') + + # Reject + self.getPage('/referer/reject') + self.assertStatus(200) + self.assertBody('Accepted!') + + self.getPage('/referer/reject', + headers=[('Referer', 'http://www.example.com/')]) + self.assertErrorPage(403, 'Forbidden Referer header.') + + +class AcceptTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def test_Accept_Tool(self): + # Test with no header provided + self.getPage('/accept/feed') + self.assertStatus(200) + self.assertInBody('Unknown Blog') + + # Specify exact media type + self.getPage('/accept/feed', + headers=[('Accept', 'application/atom+xml')]) + self.assertStatus(200) + self.assertInBody('Unknown Blog') + + # Specify matching media range + self.getPage('/accept/feed', headers=[('Accept', 'application/*')]) + self.assertStatus(200) + self.assertInBody('Unknown Blog') + + # Specify all media ranges + self.getPage('/accept/feed', headers=[('Accept', '*/*')]) + self.assertStatus(200) + self.assertInBody('Unknown Blog') + + # Specify unacceptable media types + self.getPage('/accept/feed', headers=[('Accept', 'text/html')]) + self.assertErrorPage(406, + "Your client sent this Accept header: text/html. " + "But this resource only emits these media types: " + "application/atom+xml.") + + # Test resource where tool is 'on' but media is None (not set). + self.getPage('/accept/') + self.assertStatus(200) + self.assertBody('Atom feed') + + def test_accept_selection(self): + # Try both our expected media types + self.getPage('/accept/select', [('Accept', 'text/html')]) + self.assertStatus(200) + self.assertBody('

Page Title

') + self.getPage('/accept/select', [('Accept', 'text/plain')]) + self.assertStatus(200) + self.assertBody('PAGE TITLE') + self.getPage('/accept/select', + [('Accept', 'text/plain, text/*;q=0.5')]) + self.assertStatus(200) + self.assertBody('PAGE TITLE') + + # text/* and */* should prefer text/html since it comes first + # in our 'media' argument to tools.accept + self.getPage('/accept/select', [('Accept', 'text/*')]) + self.assertStatus(200) + self.assertBody('

Page Title

') + self.getPage('/accept/select', [('Accept', '*/*')]) + self.assertStatus(200) + self.assertBody('

Page Title

') + + # Try unacceptable media types + self.getPage('/accept/select', [('Accept', 'application/xml')]) + self.assertErrorPage( + 406, + "Your client sent this Accept header: application/xml. " + "But this resource only emits these media types: " + "text/html, text/plain.") + + +class AutoVaryTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def testAutoVary(self): + self.getPage('/autovary/') + self.assertHeader( + "Vary", + 'Accept, Accept-Charset, Accept-Encoding, ' + 'Host, If-Modified-Since, Range' + ) diff --git a/deps/cherrypy/test/test_objectmapping.py b/deps/cherrypy/test/test_objectmapping.py new file mode 100644 index 00000000..c2b90bdc --- /dev/null +++ b/deps/cherrypy/test/test_objectmapping.py @@ -0,0 +1,430 @@ +import sys +import cherrypy +from cherrypy._cpcompat import ntou +from cherrypy._cptree import Application +from cherrypy.test import helper + +script_names = ["", "/foo", "/users/fred/blog", "/corp/blog"] + + +class ObjectMappingTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def index(self, name="world"): + return name + + @cherrypy.expose + def foobar(self): + return "bar" + + @cherrypy.expose + def default(self, *params, **kwargs): + return "default:" + repr(params) + + @cherrypy.expose + def other(self): + return "other" + + @cherrypy.expose + def extra(self, *p): + return repr(p) + + @cherrypy.expose + def redirect(self): + raise cherrypy.HTTPRedirect('dir1/', 302) + + def notExposed(self): + return "not exposed" + + @cherrypy.expose + def confvalue(self): + return cherrypy.request.config.get("user") + + @cherrypy.expose + def redirect_via_url(self, path): + raise cherrypy.HTTPRedirect(cherrypy.url(path)) + + @cherrypy.expose + def translate_html(self): + return "OK" + + @cherrypy.expose + def mapped_func(self, ID=None): + return "ID is %s" % ID + setattr(Root, "Von B\xfclow", mapped_func) + + class Exposing: + + @cherrypy.expose + def base(self): + return "expose 
works!" + cherrypy.expose(base, "1") + cherrypy.expose(base, "2") + + class ExposingNewStyle(object): + + @cherrypy.expose + def base(self): + return "expose works!" + cherrypy.expose(base, "1") + cherrypy.expose(base, "2") + + class Dir1: + + @cherrypy.expose + def index(self): + return "index for dir1" + + @cherrypy.expose + @cherrypy.config(**{'tools.trailing_slash.extra': True}) + def myMethod(self): + return "myMethod from dir1, path_info is:" + repr( + cherrypy.request.path_info) + + @cherrypy.expose + def default(self, *params): + return "default for dir1, param is:" + repr(params) + + class Dir2: + + @cherrypy.expose + def index(self): + return "index for dir2, path is:" + cherrypy.request.path_info + + @cherrypy.expose + def script_name(self): + return cherrypy.tree.script_name() + + @cherrypy.expose + def cherrypy_url(self): + return cherrypy.url("/extra") + + @cherrypy.expose + def posparam(self, *vpath): + return "/".join(vpath) + + class Dir3: + + def default(self): + return "default for dir3, not exposed" + + class Dir4: + + def index(self): + return "index for dir4, not exposed" + + class DefNoIndex: + + @cherrypy.expose + def default(self, *args): + raise cherrypy.HTTPRedirect("contact") + + # MethodDispatcher code + @cherrypy.expose + class ByMethod: + + def __init__(self, *things): + self.things = list(things) + + def GET(self): + return repr(self.things) + + def POST(self, thing): + self.things.append(thing) + + class Collection: + default = ByMethod('a', 'bit') + + Root.exposing = Exposing() + Root.exposingnew = ExposingNewStyle() + Root.dir1 = Dir1() + Root.dir1.dir2 = Dir2() + Root.dir1.dir2.dir3 = Dir3() + Root.dir1.dir2.dir3.dir4 = Dir4() + Root.defnoindex = DefNoIndex() + Root.bymethod = ByMethod('another') + Root.collection = Collection() + + d = cherrypy.dispatch.MethodDispatcher() + for url in script_names: + conf = {'/': {'user': (url or "/").split("/")[-2]}, + '/bymethod': {'request.dispatch': d}, + '/collection': {'request.dispatch': 
d}, + } + cherrypy.tree.mount(Root(), url, conf) + + class Isolated: + + @cherrypy.expose + def index(self): + return "made it!" + + cherrypy.tree.mount(Isolated(), "/isolated") + + @cherrypy.expose + class AnotherApp: + + def GET(self): + return "milk" + + cherrypy.tree.mount(AnotherApp(), "/app", + {'/': {'request.dispatch': d}}) + + def testObjectMapping(self): + for url in script_names: + prefix = self.script_name = url + + self.getPage('/') + self.assertBody('world') + + self.getPage("/dir1/myMethod") + self.assertBody( + "myMethod from dir1, path_info is:'/dir1/myMethod'") + + self.getPage("/this/method/does/not/exist") + self.assertBody( + "default:('this', 'method', 'does', 'not', 'exist')") + + self.getPage("/extra/too/much") + self.assertBody("('too', 'much')") + + self.getPage("/other") + self.assertBody('other') + + self.getPage("/notExposed") + self.assertBody("default:('notExposed',)") + + self.getPage("/dir1/dir2/") + self.assertBody('index for dir2, path is:/dir1/dir2/') + + # Test omitted trailing slash (should be redirected by default). + self.getPage("/dir1/dir2") + self.assertStatus(301) + self.assertHeader('Location', '%s/dir1/dir2/' % self.base()) + + # Test extra trailing slash (should be redirected if configured). + self.getPage("/dir1/myMethod/") + self.assertStatus(301) + self.assertHeader('Location', '%s/dir1/myMethod' % self.base()) + + # Test that default method must be exposed in order to match. + self.getPage("/dir1/dir2/dir3/dir4/index") + self.assertBody( + "default for dir1, param is:('dir2', 'dir3', 'dir4', 'index')") + + # Test *vpath when default() is defined but not index() + # This also tests HTTPRedirect with default. 
+ self.getPage("/defnoindex") + self.assertStatus((302, 303)) + self.assertHeader('Location', '%s/contact' % self.base()) + self.getPage("/defnoindex/") + self.assertStatus((302, 303)) + self.assertHeader('Location', '%s/defnoindex/contact' % + self.base()) + self.getPage("/defnoindex/page") + self.assertStatus((302, 303)) + self.assertHeader('Location', '%s/defnoindex/contact' % + self.base()) + + self.getPage("/redirect") + self.assertStatus('302 Found') + self.assertHeader('Location', '%s/dir1/' % self.base()) + + if not getattr(cherrypy.server, "using_apache", False): + # Test that we can use URL's which aren't all valid Python + # identifiers + # This should also test the %XX-unquoting of URL's. + self.getPage("/Von%20B%fclow?ID=14") + self.assertBody("ID is 14") + + # Test that %2F in the path doesn't get unquoted too early; + # that is, it should not be used to separate path components. + # See ticket #393. + self.getPage("/page%2Fname") + self.assertBody("default:('page/name',)") + + self.getPage("/dir1/dir2/script_name") + self.assertBody(url) + self.getPage("/dir1/dir2/cherrypy_url") + self.assertBody("%s/extra" % self.base()) + + # Test that configs don't overwrite each other from diferent apps + self.getPage("/confvalue") + self.assertBody((url or "/").split("/")[-2]) + + self.script_name = "" + + # Test absoluteURI's in the Request-Line + self.getPage('http://%s:%s/' % (self.interface(), self.PORT)) + self.assertBody('world') + + self.getPage('http://%s:%s/abs/?service=http://192.168.0.1/x/y/z' % + (self.interface(), self.PORT)) + self.assertBody("default:('abs',)") + + self.getPage('/rel/?service=http://192.168.120.121:8000/x/y/z') + self.assertBody("default:('rel',)") + + # Test that the "isolated" app doesn't leak url's into the root app. + # If it did leak, Root.default() would answer with + # "default:('isolated', 'doesnt', 'exist')". 
+ self.getPage("/isolated/") + self.assertStatus("200 OK") + self.assertBody("made it!") + self.getPage("/isolated/doesnt/exist") + self.assertStatus("404 Not Found") + + # Make sure /foobar maps to Root.foobar and not to the app + # mounted at /foo. See + # https://github.com/cherrypy/cherrypy/issues/573 + self.getPage("/foobar") + self.assertBody("bar") + + def test_translate(self): + self.getPage("/translate_html") + self.assertStatus("200 OK") + self.assertBody("OK") + + self.getPage("/translate.html") + self.assertStatus("200 OK") + self.assertBody("OK") + + self.getPage("/translate-html") + self.assertStatus("200 OK") + self.assertBody("OK") + + def test_redir_using_url(self): + for url in script_names: + prefix = self.script_name = url + + # Test the absolute path to the parent (leading slash) + self.getPage('/redirect_via_url?path=./') + self.assertStatus(('302 Found', '303 See Other')) + self.assertHeader('Location', '%s/' % self.base()) + + # Test the relative path to the parent (no leading slash) + self.getPage('/redirect_via_url?path=./') + self.assertStatus(('302 Found', '303 See Other')) + self.assertHeader('Location', '%s/' % self.base()) + + # Test the absolute path to the parent (leading slash) + self.getPage('/redirect_via_url/?path=./') + self.assertStatus(('302 Found', '303 See Other')) + self.assertHeader('Location', '%s/' % self.base()) + + # Test the relative path to the parent (no leading slash) + self.getPage('/redirect_via_url/?path=./') + self.assertStatus(('302 Found', '303 See Other')) + self.assertHeader('Location', '%s/' % self.base()) + + def testPositionalParams(self): + self.getPage("/dir1/dir2/posparam/18/24/hut/hike") + self.assertBody("18/24/hut/hike") + + # intermediate index methods should not receive posparams; + # only the "final" index method should do so. 
+ self.getPage("/dir1/dir2/5/3/sir") + self.assertBody("default for dir1, param is:('dir2', '5', '3', 'sir')") + + # test that extra positional args raises an 404 Not Found + # See https://github.com/cherrypy/cherrypy/issues/733. + self.getPage("/dir1/dir2/script_name/extra/stuff") + self.assertStatus(404) + + def testExpose(self): + # Test the cherrypy.expose function/decorator + self.getPage("/exposing/base") + self.assertBody("expose works!") + + self.getPage("/exposing/1") + self.assertBody("expose works!") + + self.getPage("/exposing/2") + self.assertBody("expose works!") + + self.getPage("/exposingnew/base") + self.assertBody("expose works!") + + self.getPage("/exposingnew/1") + self.assertBody("expose works!") + + self.getPage("/exposingnew/2") + self.assertBody("expose works!") + + def testMethodDispatch(self): + self.getPage("/bymethod") + self.assertBody("['another']") + self.assertHeader('Allow', 'GET, HEAD, POST') + + self.getPage("/bymethod", method="HEAD") + self.assertBody("") + self.assertHeader('Allow', 'GET, HEAD, POST') + + self.getPage("/bymethod", method="POST", body="thing=one") + self.assertBody("") + self.assertHeader('Allow', 'GET, HEAD, POST') + + self.getPage("/bymethod") + self.assertBody(repr(['another', ntou('one')])) + self.assertHeader('Allow', 'GET, HEAD, POST') + + self.getPage("/bymethod", method="PUT") + self.assertErrorPage(405) + self.assertHeader('Allow', 'GET, HEAD, POST') + + # Test default with posparams + self.getPage("/collection/silly", method="POST") + self.getPage("/collection", method="GET") + self.assertBody("['a', 'bit', 'silly']") + + # Test custom dispatcher set on app root (see #737). + self.getPage("/app") + self.assertBody("milk") + + def testTreeMounting(self): + class Root(object): + + @cherrypy.expose + def hello(self): + return "Hello world!" + + # When mounting an application instance, + # we can't specify a different script name in the call to mount. 
+ a = Application(Root(), '/somewhere') + self.assertRaises(ValueError, cherrypy.tree.mount, a, '/somewhereelse') + + # When mounting an application instance... + a = Application(Root(), '/somewhere') + # ...we MUST allow in identical script name in the call to mount... + cherrypy.tree.mount(a, '/somewhere') + self.getPage('/somewhere/hello') + self.assertStatus(200) + # ...and MUST allow a missing script_name. + del cherrypy.tree.apps['/somewhere'] + cherrypy.tree.mount(a) + self.getPage('/somewhere/hello') + self.assertStatus(200) + + # In addition, we MUST be able to create an Application using + # script_name == None for access to the wsgi_environ. + a = Application(Root(), script_name=None) + # However, this does not apply to tree.mount + self.assertRaises(TypeError, cherrypy.tree.mount, a, None) + + def testKeywords(self): + if sys.version_info < (3,): + return self.skip("skipped (Python 3 only)") + exec("""class Root(object): + @cherrypy.expose + def hello(self, *, name='world'): + return 'Hello %s!' 
% name +cherrypy.tree.mount(Application(Root(), '/keywords'))""") + + self.getPage('/keywords/hello') + self.assertStatus(200) + self.getPage('/keywords/hello/extra') + self.assertStatus(404) diff --git a/deps/cherrypy/test/test_params.py b/deps/cherrypy/test/test_params.py new file mode 100644 index 00000000..bb13da16 --- /dev/null +++ b/deps/cherrypy/test/test_params.py @@ -0,0 +1,60 @@ +import sys +import textwrap + +import cherrypy +from cherrypy.test import helper + + +class ParamsTest(helper.CPWebCase): + @staticmethod + def setup_server(): + class Root: + @cherrypy.expose + @cherrypy.tools.params() + def resource(self, limit=None, sort=None): + return type(limit).__name__ + # for testing on Py 2 + resource.__annotations__ = {'limit': int} + conf = {'/': {'tools.params.on': True}} + cherrypy.tree.mount(Root(), config=conf) + + def test_pass(self): + self.getPage('/resource') + self.assertStatus(200) + self.assertBody('NoneType') + + self.getPage('/resource?limit=0') + self.assertStatus(200) + self.assertBody('int') + + def test_error(self): + self.getPage('/resource?limit=') + self.assertStatus(400) + self.assertInBody('invalid literal for int') + + cherrypy.config['tools.params.error'] = 422 + self.getPage('/resource?limit=') + self.assertStatus(422) + self.assertInBody('invalid literal for int') + + cherrypy.config['tools.params.exception'] = TypeError + self.getPage('/resource?limit=') + self.assertStatus(500) + + def test_syntax(self): + if sys.version_info < (3,): + return self.skip("skipped (Python 3 only)") + code = textwrap.dedent(""" + class Root: + @cherrypy.expose + @cherrypy.tools.params() + def resource(self, limit: int): + return type(limit).__name__ + conf = {'/': {'tools.params.on': True}} + cherrypy.tree.mount(Root(), config=conf) + """) + exec(code) + + self.getPage('/resource?limit=0') + self.assertStatus(200) + self.assertBody('int') diff --git a/deps/cherrypy/test/test_proxy.py b/deps/cherrypy/test/test_proxy.py new file mode 100644 index 
00000000..8e434d15 --- /dev/null +++ b/deps/cherrypy/test/test_proxy.py @@ -0,0 +1,138 @@ +import cherrypy +from cherrypy.test import helper + +script_names = ["", "/path/to/myapp"] + + +class ProxyTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + + # Set up site + cherrypy.config.update({ + 'tools.proxy.on': True, + 'tools.proxy.base': 'www.mydomain.test', + }) + + # Set up application + + class Root: + + def __init__(self, sn): + # Calculate a URL outside of any requests. + self.thisnewpage = cherrypy.url( + "/this/new/page", script_name=sn) + + @cherrypy.expose + def pageurl(self): + return self.thisnewpage + + @cherrypy.expose + def index(self): + raise cherrypy.HTTPRedirect('dummy') + + @cherrypy.expose + def remoteip(self): + return cherrypy.request.remote.ip + + @cherrypy.expose + @cherrypy.config(**{ + 'tools.proxy.local': 'X-Host', + 'tools.trailing_slash.extra': True, + }) + def xhost(self): + raise cherrypy.HTTPRedirect('blah') + + @cherrypy.expose + def base(self): + return cherrypy.request.base + + @cherrypy.expose + @cherrypy.config(**{'tools.proxy.scheme': 'X-Forwarded-Ssl'}) + def ssl(self): + return cherrypy.request.base + + @cherrypy.expose + def newurl(self): + return ("Browse to this page." 
+ % cherrypy.url("/this/new/page")) + + for sn in script_names: + cherrypy.tree.mount(Root(sn), sn) + + def testProxy(self): + self.getPage("/") + self.assertHeader('Location', + "%s://www.mydomain.test%s/dummy" % + (self.scheme, self.prefix())) + + # Test X-Forwarded-Host (Apache 1.3.33+ and Apache 2) + self.getPage( + "/", headers=[('X-Forwarded-Host', 'http://www.example.test')]) + self.assertHeader('Location', "http://www.example.test/dummy") + self.getPage("/", headers=[('X-Forwarded-Host', 'www.example.test')]) + self.assertHeader('Location', "%s://www.example.test/dummy" % + self.scheme) + # Test multiple X-Forwarded-Host headers + self.getPage("/", headers=[ + ('X-Forwarded-Host', 'http://www.example.test, www.cherrypy.test'), + ]) + self.assertHeader('Location', "http://www.example.test/dummy") + + # Test X-Forwarded-For (Apache2) + self.getPage("/remoteip", + headers=[('X-Forwarded-For', '192.168.0.20')]) + self.assertBody("192.168.0.20") + #Fix bug #1268 + self.getPage("/remoteip", + headers=[ + ('X-Forwarded-For', '67.15.36.43, 192.168.0.20') + ]) + self.assertBody("67.15.36.43") + + # Test X-Host (lighttpd; see https://trac.lighttpd.net/trac/ticket/418) + self.getPage("/xhost", headers=[('X-Host', 'www.example.test')]) + self.assertHeader('Location', "%s://www.example.test/blah" % + self.scheme) + + # Test X-Forwarded-Proto (lighttpd) + self.getPage("/base", headers=[('X-Forwarded-Proto', 'https')]) + self.assertBody("https://www.mydomain.test") + + # Test X-Forwarded-Ssl (webfaction?) 
+ self.getPage("/ssl", headers=[('X-Forwarded-Ssl', 'on')]) + self.assertBody("https://www.mydomain.test") + + # Test cherrypy.url() + for sn in script_names: + # Test the value inside requests + self.getPage(sn + "/newurl") + self.assertBody( + "Browse to this page.") + self.getPage(sn + "/newurl", headers=[('X-Forwarded-Host', + 'http://www.example.test')]) + self.assertBody("Browse to this page.") + + # Test the value outside requests + port = "" + if self.scheme == "http" and self.PORT != 80: + port = ":%s" % self.PORT + elif self.scheme == "https" and self.PORT != 443: + port = ":%s" % self.PORT + host = self.HOST + if host in ('0.0.0.0', '::'): + import socket + host = socket.gethostname() + expected = ("%s://%s%s%s/this/new/page" + % (self.scheme, host, port, sn)) + self.getPage(sn + "/pageurl") + self.assertBody(expected) + + # Test trailing slash (see + # https://github.com/cherrypy/cherrypy/issues/562). + self.getPage("/xhost/", headers=[('X-Host', 'www.example.test')]) + self.assertHeader('Location', "%s://www.example.test/xhost" + % self.scheme) diff --git a/deps/cherrypy/test/test_refleaks.py b/deps/cherrypy/test/test_refleaks.py new file mode 100644 index 00000000..929f6b1f --- /dev/null +++ b/deps/cherrypy/test/test_refleaks.py @@ -0,0 +1,67 @@ +"""Tests for refleaks.""" + +import itertools +import platform + +from cherrypy._cpcompat import HTTPConnection, HTTPSConnection +import threading + +import cherrypy + + +data = object() + + +from cherrypy.test import helper + + +class ReferenceTests(helper.CPWebCase): + + @staticmethod + def setup_server(): + + class Root: + + @cherrypy.expose + def index(self, *args, **kwargs): + cherrypy.request.thing = data + return "Hello world!" 
+ + cherrypy.tree.mount(Root()) + + def test_threadlocal_garbage(self): + if platform.system() == 'Darwin': + self.skip("queue issues; see #1474") + success = itertools.count() + + def getpage(): + host = '%s:%s' % (self.interface(), self.PORT) + if self.scheme == 'https': + c = HTTPSConnection(host) + else: + c = HTTPConnection(host) + try: + c.putrequest('GET', '/') + c.endheaders() + response = c.getresponse() + body = response.read() + self.assertEqual(response.status, 200) + self.assertEqual(body, b"Hello world!") + finally: + c.close() + next(success) + + ITERATIONS = 25 + + ts = [ + threading.Thread(target=getpage) + for _ in range(ITERATIONS) + ] + + for t in ts: + t.start() + + for t in ts: + t.join() + + self.assertEqual(next(success), ITERATIONS) diff --git a/deps/cherrypy/test/test_request_obj.py b/deps/cherrypy/test/test_request_obj.py new file mode 100644 index 00000000..2ad06fa8 --- /dev/null +++ b/deps/cherrypy/test/test_request_obj.py @@ -0,0 +1,808 @@ +"""Basic tests for the cherrypy.Request object.""" + +import os +localDir = os.path.dirname(__file__) +import sys +import types + +import six + +from cherrypy._cpcompat import IncompleteRead, ntob, ntou + +import cherrypy +from cherrypy.lib import httputil + +defined_http_methods = ("OPTIONS", "GET", "HEAD", "POST", "PUT", "DELETE", + "TRACE", "PROPFIND") + + +# Client-side code # + +from cherrypy.test import helper + + +class RequestObjectTests(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def index(self): + return "hello" + + @cherrypy.expose + def scheme(self): + return cherrypy.request.scheme + + root = Root() + + class TestType(type): + """Metaclass which automatically exposes all functions in each + subclass, and adds an instance of the subclass as an attribute + of root. 
+ """ + def __init__(cls, name, bases, dct): + type.__init__(cls, name, bases, dct) + for value in dct.values(): + if isinstance(value, types.FunctionType): + value.exposed = True + setattr(root, name.lower(), cls()) + Test = TestType('Test', (object,), {}) + + class PathInfo(Test): + + def default(self, *args): + return cherrypy.request.path_info + + class Params(Test): + + def index(self, thing): + return repr(thing) + + def ismap(self, x, y): + return "Coordinates: %s, %s" % (x, y) + + @cherrypy.config(**{'request.query_string_encoding': 'latin1'}) + def default(self, *args, **kwargs): + return "args: %s kwargs: %s" % (args, sorted(kwargs.items())) + + @cherrypy.expose + class ParamErrorsCallable(object): + + def __call__(self): + return "data" + + class ParamErrors(Test): + + @cherrypy.expose + def one_positional(self, param1): + return "data" + + @cherrypy.expose + def one_positional_args(self, param1, *args): + return "data" + + @cherrypy.expose + def one_positional_args_kwargs(self, param1, *args, **kwargs): + return "data" + + @cherrypy.expose + def one_positional_kwargs(self, param1, **kwargs): + return "data" + + @cherrypy.expose + def no_positional(self): + return "data" + + @cherrypy.expose + def no_positional_args(self, *args): + return "data" + + @cherrypy.expose + def no_positional_args_kwargs(self, *args, **kwargs): + return "data" + + @cherrypy.expose + def no_positional_kwargs(self, **kwargs): + return "data" + + callable_object = ParamErrorsCallable() + + @cherrypy.expose + def raise_type_error(self, **kwargs): + raise TypeError("Client Error") + + @cherrypy.expose + def raise_type_error_with_default_param(self, x, y=None): + return '%d' % 'a' # throw an exception + + def callable_error_page(status, **kwargs): + return "Error %s - Well, I'm very sorry but you haven't paid!" 
% ( + status) + + @cherrypy.config(**{'tools.log_tracebacks.on': True}) + class Error(Test): + + def reason_phrase(self): + raise cherrypy.HTTPError("410 Gone fishin'") + + @cherrypy.config(**{ + 'error_page.404': os.path.join(localDir, "static/index.html"), + 'error_page.401': callable_error_page, + }) + def custom(self, err='404'): + raise cherrypy.HTTPError( + int(err), "No, really, not found!") + + @cherrypy.config(**{ + 'error_page.default': callable_error_page, + }) + def custom_default(self): + return 1 + 'a' # raise an unexpected error + + @cherrypy.config(**{'error_page.404': "nonexistent.html"}) + def noexist(self): + raise cherrypy.HTTPError(404, "No, really, not found!") + + def page_method(self): + raise ValueError() + + def page_yield(self): + yield "howdy" + raise ValueError() + + @cherrypy.config(**{"response.stream": True}) + def page_streamed(self): + yield "word up" + raise ValueError() + yield "very oops" + + @cherrypy.config(**{'request.show_tracebacks': False}) + def cause_err_in_finalize(self): + # Since status must start with an int, this should error. + cherrypy.response.status = "ZOO OK" + + @cherrypy.config(**{'request.throw_errors': True}) + def rethrow(self): + """Test that an error raised here will be thrown out to + the server. + """ + raise ValueError() + + class Expect(Test): + + def expectation_failed(self): + expect = cherrypy.request.headers.elements("Expect") + if expect and expect[0].value != '100-continue': + raise cherrypy.HTTPError(400) + raise cherrypy.HTTPError(417, 'Expectation Failed') + + class Headers(Test): + + def default(self, headername): + """Spit back out the value for the requested header.""" + return cherrypy.request.headers[headername] + + def doubledheaders(self): + # From https://github.com/cherrypy/cherrypy/issues/165: + # "header field names should not be case sensitive sayes the + # rfc. 
if i set a headerfield in complete lowercase i end up + # with two header fields, one in lowercase, the other in + # mixed-case." + + # Set the most common headers + hMap = cherrypy.response.headers + hMap['content-type'] = "text/html" + hMap['content-length'] = 18 + hMap['server'] = 'CherryPy headertest' + hMap['location'] = ('%s://%s:%s/headers/' + % (cherrypy.request.local.ip, + cherrypy.request.local.port, + cherrypy.request.scheme)) + + # Set a rare header for fun + hMap['Expires'] = 'Thu, 01 Dec 2194 16:00:00 GMT' + + return "double header test" + + def ifmatch(self): + val = cherrypy.request.headers['If-Match'] + assert isinstance(val, six.text_type) + cherrypy.response.headers['ETag'] = val + return val + + class HeaderElements(Test): + + def get_elements(self, headername): + e = cherrypy.request.headers.elements(headername) + return "\n".join([six.text_type(x) for x in e]) + + class Method(Test): + + def index(self): + m = cherrypy.request.method + if m in defined_http_methods or m == "CONNECT": + return m + + if m == "LINK": + raise cherrypy.HTTPError(405) + else: + raise cherrypy.HTTPError(501) + + def parameterized(self, data): + return data + + def request_body(self): + # This should be a file object (temp file), + # which CP will just pipe back out if we tell it to. + return cherrypy.request.body + + def reachable(self): + return "success" + + class Divorce: + + """HTTP Method handlers shouldn't collide with normal method names. + For example, a GET-handler shouldn't collide with a method named + 'get'. + + If you build HTTP method dispatching into CherryPy, rewrite this + class to use your new dispatch mechanism and make sure that: + "GET /divorce HTTP/1.1" maps to divorce.index() and + "GET /divorce/get?ID=13 HTTP/1.1" maps to divorce.get() + """ + + documents = {} + + @cherrypy.expose + def index(self): + yield "

Choose your document

\n" + yield "
    \n" + for id, contents in self.documents.items(): + yield ( + "
  • %s:" + " %s
  • \n" % (id, id, contents)) + yield "
" + + @cherrypy.expose + def get(self, ID): + return ("Divorce document %s: %s" % + (ID, self.documents.get(ID, "empty"))) + + root.divorce = Divorce() + + class ThreadLocal(Test): + + def index(self): + existing = repr(getattr(cherrypy.request, "asdf", None)) + cherrypy.request.asdf = "rassfrassin" + return existing + + appconf = { + '/method': { + 'request.methods_with_bodies': ("POST", "PUT", "PROPFIND") + }, + } + cherrypy.tree.mount(root, config=appconf) + + def test_scheme(self): + self.getPage("/scheme") + self.assertBody(self.scheme) + + def testRelativeURIPathInfo(self): + self.getPage("/pathinfo/foo/bar") + self.assertBody("/pathinfo/foo/bar") + + def testAbsoluteURIPathInfo(self): + # http://cherrypy.org/ticket/1061 + self.getPage("http://localhost/pathinfo/foo/bar") + self.assertBody("/pathinfo/foo/bar") + + def testParams(self): + self.getPage("/params/?thing=a") + self.assertBody(repr(ntou("a"))) + + self.getPage("/params/?thing=a&thing=b&thing=c") + self.assertBody(repr([ntou('a'), ntou('b'), ntou('c')])) + + # Test friendly error message when given params are not accepted. 
+ cherrypy.config.update({"request.show_mismatched_params": True}) + self.getPage("/params/?notathing=meeting") + self.assertInBody("Missing parameters: thing") + self.getPage("/params/?thing=meeting¬athing=meeting") + self.assertInBody("Unexpected query string parameters: notathing") + + # Test ability to turn off friendly error messages + cherrypy.config.update({"request.show_mismatched_params": False}) + self.getPage("/params/?notathing=meeting") + self.assertInBody("Not Found") + self.getPage("/params/?thing=meeting¬athing=meeting") + self.assertInBody("Not Found") + + # Test "% HEX HEX"-encoded URL, param keys, and values + self.getPage("/params/%d4%20%e3/cheese?Gruy%E8re=Bulgn%e9ville") + self.assertBody("args: %s kwargs: %s" % + (('\xd4 \xe3', 'cheese'), + [('Gruy\xe8re', ntou('Bulgn\xe9ville'))])) + + # Make sure that encoded = and & get parsed correctly + self.getPage( + "/params/code?url=http%3A//cherrypy.org/index%3Fa%3D1%26b%3D2") + self.assertBody("args: %s kwargs: %s" % + (('code',), + [('url', ntou('http://cherrypy.org/index?a=1&b=2'))])) + + # Test coordinates sent by + self.getPage("/params/ismap?223,114") + self.assertBody("Coordinates: 223, 114") + + # Test "name[key]" dict-like params + self.getPage("/params/dictlike?a[1]=1&a[2]=2&b=foo&b[bar]=baz") + self.assertBody("args: %s kwargs: %s" % + (('dictlike',), + [('a[1]', ntou('1')), ('a[2]', ntou('2')), + ('b', ntou('foo')), ('b[bar]', ntou('baz'))])) + + def testParamErrors(self): + + # test that all of the handlers work when given + # the correct parameters in order to ensure that the + # errors below aren't coming from some other source. + for uri in ( + '/paramerrors/one_positional?param1=foo', + '/paramerrors/one_positional_args?param1=foo', + '/paramerrors/one_positional_args/foo', + '/paramerrors/one_positional_args/foo/bar/baz', + '/paramerrors/one_positional_args_kwargs?' + 'param1=foo¶m2=bar', + '/paramerrors/one_positional_args_kwargs/foo?' 
+ 'param2=bar¶m3=baz', + '/paramerrors/one_positional_args_kwargs/foo/bar/baz?' + 'param2=bar¶m3=baz', + '/paramerrors/one_positional_kwargs?' + 'param1=foo¶m2=bar¶m3=baz', + '/paramerrors/one_positional_kwargs/foo?' + 'param4=foo¶m2=bar¶m3=baz', + '/paramerrors/no_positional', + '/paramerrors/no_positional_args/foo', + '/paramerrors/no_positional_args/foo/bar/baz', + '/paramerrors/no_positional_args_kwargs?param1=foo¶m2=bar', + '/paramerrors/no_positional_args_kwargs/foo?param2=bar', + '/paramerrors/no_positional_args_kwargs/foo/bar/baz?' + 'param2=bar¶m3=baz', + '/paramerrors/no_positional_kwargs?param1=foo¶m2=bar', + '/paramerrors/callable_object', + ): + self.getPage(uri) + self.assertStatus(200) + + error_msgs = [ + 'Missing parameters', + 'Nothing matches the given URI', + 'Multiple values for parameters', + 'Unexpected query string parameters', + 'Unexpected body parameters', + 'Invalid path in Request-URI', + 'Illegal #fragment in Request-URI', + ] + + # uri should be tested for valid absolute path, the status must be 400. + for uri, error_idx in ( + ('invalid/path/without/leading/slash', 5), + ('/valid/path#invalid=fragment', 6), + ): + self.getPage(uri) + self.assertStatus(400) + self.assertInBody(error_msgs[error_idx]) + + # query string parameters are part of the URI, so if they are wrong + # for a particular handler, the status MUST be a 404. + for uri, msg in ( + ('/paramerrors/one_positional', error_msgs[0]), + ('/paramerrors/one_positional?foo=foo', error_msgs[0]), + ('/paramerrors/one_positional/foo/bar/baz', error_msgs[1]), + ('/paramerrors/one_positional/foo?param1=foo', error_msgs[2]), + ('/paramerrors/one_positional/foo?param1=foo¶m2=foo', + error_msgs[2]), + ('/paramerrors/one_positional_args/foo?param1=foo¶m2=foo', + error_msgs[2]), + ('/paramerrors/one_positional_args/foo/bar/baz?param2=foo', + error_msgs[3]), + ('/paramerrors/one_positional_args_kwargs/foo/bar/baz?' 
+ 'param1=bar¶m3=baz', + error_msgs[2]), + ('/paramerrors/one_positional_kwargs/foo?' + 'param1=foo¶m2=bar¶m3=baz', + error_msgs[2]), + ('/paramerrors/no_positional/boo', error_msgs[1]), + ('/paramerrors/no_positional?param1=foo', error_msgs[3]), + ('/paramerrors/no_positional_args/boo?param1=foo', error_msgs[3]), + ('/paramerrors/no_positional_kwargs/boo?param1=foo', + error_msgs[1]), + ('/paramerrors/callable_object?param1=foo', error_msgs[3]), + ('/paramerrors/callable_object/boo', error_msgs[1]), + ): + for show_mismatched_params in (True, False): + cherrypy.config.update( + {'request.show_mismatched_params': show_mismatched_params}) + self.getPage(uri) + self.assertStatus(404) + if show_mismatched_params: + self.assertInBody(msg) + else: + self.assertInBody("Not Found") + + # if body parameters are wrong, a 400 must be returned. + for uri, body, msg in ( + ('/paramerrors/one_positional/foo', + 'param1=foo', error_msgs[2]), + ('/paramerrors/one_positional/foo', + 'param1=foo¶m2=foo', error_msgs[2]), + ('/paramerrors/one_positional_args/foo', + 'param1=foo¶m2=foo', error_msgs[2]), + ('/paramerrors/one_positional_args/foo/bar/baz', + 'param2=foo', error_msgs[4]), + ('/paramerrors/one_positional_args_kwargs/foo/bar/baz', + 'param1=bar¶m3=baz', error_msgs[2]), + ('/paramerrors/one_positional_kwargs/foo', + 'param1=foo¶m2=bar¶m3=baz', error_msgs[2]), + ('/paramerrors/no_positional', 'param1=foo', error_msgs[4]), + ('/paramerrors/no_positional_args/boo', + 'param1=foo', error_msgs[4]), + ('/paramerrors/callable_object', 'param1=foo', error_msgs[4]), + ): + for show_mismatched_params in (True, False): + cherrypy.config.update( + {'request.show_mismatched_params': show_mismatched_params}) + self.getPage(uri, method='POST', body=body) + self.assertStatus(400) + if show_mismatched_params: + self.assertInBody(msg) + else: + self.assertInBody("400 Bad") + + # even if body parameters are wrong, if we get the uri wrong, then + # it's a 404 + for uri, body, msg in ( + 
('/paramerrors/one_positional?param2=foo', + 'param1=foo', error_msgs[3]), + ('/paramerrors/one_positional/foo/bar', + 'param2=foo', error_msgs[1]), + ('/paramerrors/one_positional_args/foo/bar?param2=foo', + 'param3=foo', error_msgs[3]), + ('/paramerrors/one_positional_kwargs/foo/bar', + 'param2=bar¶m3=baz', error_msgs[1]), + ('/paramerrors/no_positional?param1=foo', + 'param2=foo', error_msgs[3]), + ('/paramerrors/no_positional_args/boo?param2=foo', + 'param1=foo', error_msgs[3]), + ('/paramerrors/callable_object?param2=bar', + 'param1=foo', error_msgs[3]), + ): + for show_mismatched_params in (True, False): + cherrypy.config.update( + {'request.show_mismatched_params': show_mismatched_params}) + self.getPage(uri, method='POST', body=body) + self.assertStatus(404) + if show_mismatched_params: + self.assertInBody(msg) + else: + self.assertInBody("Not Found") + + # In the case that a handler raises a TypeError we should + # let that type error through. + for uri in ( + '/paramerrors/raise_type_error', + '/paramerrors/raise_type_error_with_default_param?x=0', + '/paramerrors/raise_type_error_with_default_param?x=0&y=0', + ): + self.getPage(uri, method='GET') + self.assertStatus(500) + self.assertTrue('Client Error', self.body) + + def testErrorHandling(self): + self.getPage("/error/missing") + self.assertStatus(404) + self.assertErrorPage(404, "The path '/error/missing' was not found.") + + ignore = helper.webtest.ignored_exceptions + ignore.append(ValueError) + try: + valerr = '\n raise ValueError()\nValueError' + self.getPage("/error/page_method") + self.assertErrorPage(500, pattern=valerr) + + self.getPage("/error/page_yield") + self.assertErrorPage(500, pattern=valerr) + + if (cherrypy.server.protocol_version == "HTTP/1.0" or + getattr(cherrypy.server, "using_apache", False)): + self.getPage("/error/page_streamed") + # Because this error is raised after the response body has + # started, the status should not change to an error status. 
+ self.assertStatus(200) + self.assertBody("word up") + else: + # Under HTTP/1.1, the chunked transfer-coding is used. + # The HTTP client will choke when the output is incomplete. + self.assertRaises((ValueError, IncompleteRead), self.getPage, + "/error/page_streamed") + + # No traceback should be present + self.getPage("/error/cause_err_in_finalize") + msg = "Illegal response status from server ('ZOO' is non-numeric)." + self.assertErrorPage(500, msg, None) + finally: + ignore.pop() + + # Test HTTPError with a reason-phrase in the status arg. + self.getPage('/error/reason_phrase') + self.assertStatus("410 Gone fishin'") + + # Test custom error page for a specific error. + self.getPage("/error/custom") + self.assertStatus(404) + self.assertBody("Hello, world\r\n" + (" " * 499)) + + # Test custom error page for a specific error. + self.getPage("/error/custom?err=401") + self.assertStatus(401) + self.assertBody( + "Error 401 Unauthorized - " + "Well, I'm very sorry but you haven't paid!") + + # Test default custom error page. + self.getPage("/error/custom_default") + self.assertStatus(500) + self.assertBody( + "Error 500 Internal Server Error - " + "Well, I'm very sorry but you haven't paid!".ljust(513)) + + # Test error in custom error page (ticket #305). + # Note that the message is escaped for HTML (ticket #310). + self.getPage("/error/noexist") + self.assertStatus(404) + if sys.version_info >= (3, 3): + exc_name = "FileNotFoundError" + else: + exc_name = "IOError" + msg = ("No, <b>really</b>, not found!
" + "In addition, the custom error page failed:\n
" + "%s: [Errno 2] " + "No such file or directory: 'nonexistent.html'") % (exc_name,) + self.assertInBody(msg) + + if getattr(cherrypy.server, "using_apache", False): + pass + else: + # Test throw_errors (ticket #186). + self.getPage("/error/rethrow") + self.assertInBody("raise ValueError()") + + def testExpect(self): + e = ('Expect', '100-continue') + self.getPage("/headerelements/get_elements?headername=Expect", [e]) + self.assertBody('100-continue') + + self.getPage("/expect/expectation_failed", [e]) + self.assertStatus(417) + + def testHeaderElements(self): + # Accept-* header elements should be sorted, with most preferred first. + h = [('Accept', 'audio/*; q=0.2, audio/basic')] + self.getPage("/headerelements/get_elements?headername=Accept", h) + self.assertStatus(200) + self.assertBody("audio/basic\n" + "audio/*;q=0.2") + + h = [ + ('Accept', + 'text/plain; q=0.5, text/html, text/x-dvi; q=0.8, text/x-c') + ] + self.getPage("/headerelements/get_elements?headername=Accept", h) + self.assertStatus(200) + self.assertBody("text/x-c\n" + "text/html\n" + "text/x-dvi;q=0.8\n" + "text/plain;q=0.5") + + # Test that more specific media ranges get priority. 
+ h = [('Accept', 'text/*, text/html, text/html;level=1, */*')] + self.getPage("/headerelements/get_elements?headername=Accept", h) + self.assertStatus(200) + self.assertBody("text/html;level=1\n" + "text/html\n" + "text/*\n" + "*/*") + + # Test Accept-Charset + h = [('Accept-Charset', 'iso-8859-5, unicode-1-1;q=0.8')] + self.getPage( + "/headerelements/get_elements?headername=Accept-Charset", h) + self.assertStatus("200 OK") + self.assertBody("iso-8859-5\n" + "unicode-1-1;q=0.8") + + # Test Accept-Encoding + h = [('Accept-Encoding', 'gzip;q=1.0, identity; q=0.5, *;q=0')] + self.getPage( + "/headerelements/get_elements?headername=Accept-Encoding", h) + self.assertStatus("200 OK") + self.assertBody("gzip;q=1.0\n" + "identity;q=0.5\n" + "*;q=0") + + # Test Accept-Language + h = [('Accept-Language', 'da, en-gb;q=0.8, en;q=0.7')] + self.getPage( + "/headerelements/get_elements?headername=Accept-Language", h) + self.assertStatus("200 OK") + self.assertBody("da\n" + "en-gb;q=0.8\n" + "en;q=0.7") + + # Test malformed header parsing. See + # https://github.com/cherrypy/cherrypy/issues/763. + self.getPage("/headerelements/get_elements?headername=Content-Type", + # Note the illegal trailing ";" + headers=[('Content-Type', 'text/html; charset=utf-8;')]) + self.assertStatus(200) + self.assertBody("text/html;charset=utf-8") + + def test_repeated_headers(self): + # Test that two request headers are collapsed into one. + # See https://github.com/cherrypy/cherrypy/issues/542. + self.getPage("/headers/Accept-Charset", + headers=[("Accept-Charset", "iso-8859-5"), + ("Accept-Charset", "unicode-1-1;q=0.8")]) + self.assertBody("iso-8859-5, unicode-1-1;q=0.8") + + # Tests that each header only appears once, regardless of case. 
+ self.getPage("/headers/doubledheaders") + self.assertBody("double header test") + hnames = [name.title() for name, val in self.headers] + for key in ['Content-Length', 'Content-Type', 'Date', + 'Expires', 'Location', 'Server']: + self.assertEqual(hnames.count(key), 1, self.headers) + + def test_encoded_headers(self): + # First, make sure the innards work like expected. + self.assertEqual( + httputil.decode_TEXT(ntou("=?utf-8?q?f=C3=BCr?=")), ntou("f\xfcr")) + + if cherrypy.server.protocol_version == "HTTP/1.1": + # Test RFC-2047-encoded request and response header values + u = ntou('\u212bngstr\xf6m', 'escape') + c = ntou("=E2=84=ABngstr=C3=B6m") + self.getPage("/headers/ifmatch", + [('If-Match', ntou('=?utf-8?q?%s?=') % c)]) + # The body should be utf-8 encoded. + self.assertBody(ntob("\xe2\x84\xabngstr\xc3\xb6m")) + # But the Etag header should be RFC-2047 encoded (binary) + self.assertHeader("ETag", ntou('=?utf-8?b?4oSrbmdzdHLDtm0=?=')) + + # Test a *LONG* RFC-2047-encoded request and response header value + self.getPage("/headers/ifmatch", + [('If-Match', ntou('=?utf-8?q?%s?=') % (c * 10))]) + self.assertBody(ntob("\xe2\x84\xabngstr\xc3\xb6m") * 10) + # Note: this is different output for Python3, but it decodes fine. 
+ etag = self.assertHeader( + "ETag", + '=?utf-8?b?4oSrbmdzdHLDtm3ihKtuZ3N0csO2beKEq25nc3Ryw7Zt' + '4oSrbmdzdHLDtm3ihKtuZ3N0csO2beKEq25nc3Ryw7Zt' + '4oSrbmdzdHLDtm3ihKtuZ3N0csO2beKEq25nc3Ryw7Zt' + '4oSrbmdzdHLDtm0=?=') + self.assertEqual(httputil.decode_TEXT(etag), u * 10) + + def test_header_presence(self): + # If we don't pass a Content-Type header, it should not be present + # in cherrypy.request.headers + self.getPage("/headers/Content-Type", + headers=[]) + self.assertStatus(500) + + # If Content-Type is present in the request, it should be present in + # cherrypy.request.headers + self.getPage("/headers/Content-Type", + headers=[("Content-type", "application/json")]) + self.assertBody("application/json") + + def test_basic_HTTPMethods(self): + helper.webtest.methods_with_bodies = ("POST", "PUT", "PROPFIND") + + # Test that all defined HTTP methods work. + for m in defined_http_methods: + self.getPage("/method/", method=m) + + # HEAD requests should not return any body. + if m == "HEAD": + self.assertBody("") + elif m == "TRACE": + # Some HTTP servers (like modpy) have their own TRACE support + self.assertEqual(self.body[:5], ntob("TRACE")) + else: + self.assertBody(m) + + # Request a PUT method with a form-urlencoded body + self.getPage("/method/parameterized", method="PUT", + body="data=on+top+of+other+things") + self.assertBody("on top of other things") + + # Request a PUT method with a file body + b = "one thing on top of another" + h = [("Content-Type", "text/plain"), + ("Content-Length", str(len(b)))] + self.getPage("/method/request_body", headers=h, method="PUT", body=b) + self.assertStatus(200) + self.assertBody(b) + + # Request a PUT method with a file body but no Content-Type. + # See https://github.com/cherrypy/cherrypy/issues/790. 
+ b = ntob("one thing on top of another") + self.persistent = True + try: + conn = self.HTTP_CONN + conn.putrequest("PUT", "/method/request_body", skip_host=True) + conn.putheader("Host", self.HOST) + conn.putheader('Content-Length', str(len(b))) + conn.endheaders() + conn.send(b) + response = conn.response_class(conn.sock, method="PUT") + response.begin() + self.assertEqual(response.status, 200) + self.body = response.read() + self.assertBody(b) + finally: + self.persistent = False + + # Request a PUT method with no body whatsoever (not an empty one). + # See https://github.com/cherrypy/cherrypy/issues/650. + # Provide a C-T or webtest will provide one (and a C-L) for us. + h = [("Content-Type", "text/plain")] + self.getPage("/method/reachable", headers=h, method="PUT") + self.assertStatus(411) + + # Request a custom method with a request body + b = ('\n\n' + '' + '') + h = [('Content-Type', 'text/xml'), + ('Content-Length', str(len(b)))] + self.getPage("/method/request_body", headers=h, + method="PROPFIND", body=b) + self.assertStatus(200) + self.assertBody(b) + + # Request a disallowed method + self.getPage("/method/", method="LINK") + self.assertStatus(405) + + # Request an unknown method + self.getPage("/method/", method="SEARCH") + self.assertStatus(501) + + # For method dispatchers: make sure that an HTTP method doesn't + # collide with a virtual path atom. If you build HTTP-method + # dispatching into the core, rewrite these handlers to use + # your dispatch idioms. + self.getPage("/divorce/get?ID=13") + self.assertBody('Divorce document 13: empty') + self.assertStatus(200) + self.getPage("/divorce/", method="GET") + self.assertBody('

Choose your document

\n
    \n
') + self.assertStatus(200) + + def test_CONNECT_method(self): + if getattr(cherrypy.server, "using_apache", False): + return self.skip("skipped due to known Apache differences... ") + + self.getPage("/method/", method="CONNECT") + self.assertBody("CONNECT") + + def testEmptyThreadlocals(self): + results = [] + for x in range(20): + self.getPage("/threadlocal/") + results.append(self.body) + self.assertEqual(results, [ntob("None")] * 20) diff --git a/deps/cherrypy/test/test_routes.py b/deps/cherrypy/test/test_routes.py new file mode 100644 index 00000000..ccc76574 --- /dev/null +++ b/deps/cherrypy/test/test_routes.py @@ -0,0 +1,75 @@ +import os +curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) + +import cherrypy + +from cherrypy.test import helper +import nose + + +class RoutesDispatchTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + + try: + import routes # noqa + except ImportError: + raise nose.SkipTest('Install routes to test RoutesDispatcher code') + + class Dummy: + + def index(self): + return "I said good day!" + + class City: + + def __init__(self, name): + self.name = name + self.population = 10000 + + @cherrypy.config(**{ + 'tools.response_headers.on': True, + 'tools.response_headers.headers': [ + ('Content-Language', 'en-GB'), + ], + }) + def index(self, **kwargs): + return "Welcome to %s, pop. 
%s" % (self.name, self.population) + + def update(self, **kwargs): + self.population = kwargs['pop'] + return "OK" + + d = cherrypy.dispatch.RoutesDispatcher() + d.connect(action='index', name='hounslow', route='/hounslow', + controller=City('Hounslow')) + d.connect( + name='surbiton', route='/surbiton', controller=City('Surbiton'), + action='index', conditions=dict(method=['GET'])) + d.mapper.connect('/surbiton', controller='surbiton', + action='update', conditions=dict(method=['POST'])) + d.connect('main', ':action', controller=Dummy()) + + conf = {'/': {'request.dispatch': d}} + cherrypy.tree.mount(root=None, config=conf) + + def test_Routes_Dispatch(self): + self.getPage("/hounslow") + self.assertStatus("200 OK") + self.assertBody("Welcome to Hounslow, pop. 10000") + + self.getPage("/foo") + self.assertStatus("404 Not Found") + + self.getPage("/surbiton") + self.assertStatus("200 OK") + self.assertBody("Welcome to Surbiton, pop. 10000") + + self.getPage("/surbiton", method="POST", body="pop=1327") + self.assertStatus("200 OK") + self.assertBody("OK") + self.getPage("/surbiton") + self.assertStatus("200 OK") + self.assertHeader("Content-Language", "en-GB") + self.assertBody("Welcome to Surbiton, pop. 
1327") diff --git a/deps/cherrypy/test/test_session.py b/deps/cherrypy/test/test_session.py new file mode 100644 index 00000000..3e39e472 --- /dev/null +++ b/deps/cherrypy/test/test_session.py @@ -0,0 +1,491 @@ +import os +localDir = os.path.dirname(__file__) +import threading +import time + +import cherrypy +from cherrypy._cpcompat import copykeys, HTTPConnection, HTTPSConnection +from cherrypy.lib import sessions +from cherrypy.lib import reprconf +from cherrypy.lib.httputil import response_codes + + +def http_methods_allowed(methods=['GET', 'HEAD']): + method = cherrypy.request.method.upper() + if method not in methods: + cherrypy.response.headers['Allow'] = ", ".join(methods) + raise cherrypy.HTTPError(405) + +cherrypy.tools.allow = cherrypy.Tool('on_start_resource', http_methods_allowed) + + +def setup_server(): + + @cherrypy.config(**{ + 'tools.sessions.on': True, + 'tools.sessions.storage_class': sessions.RamSession, + 'tools.sessions.storage_path': localDir, + 'tools.sessions.timeout': (1.0 / 60), + 'tools.sessions.clean_freq': (1.0 / 60), + }) + class Root: + + @cherrypy.expose + def clear(self): + cherrypy.session.cache.clear() + + @cherrypy.expose + def data(self): + cherrypy.session['aha'] = 'foo' + return repr(cherrypy.session._data) + + @cherrypy.expose + def testGen(self): + counter = cherrypy.session.get('counter', 0) + 1 + cherrypy.session['counter'] = counter + yield str(counter) + + @cherrypy.expose + def testStr(self): + counter = cherrypy.session.get('counter', 0) + 1 + cherrypy.session['counter'] = counter + return str(counter) + + @cherrypy.expose + @cherrypy.config(**{'tools.sessions.on': False}) + def set_session_cls(self, new_cls_name): + new_cls = reprconf.attributes(new_cls_name) + cfg = {'tools.sessions.storage_class': new_cls} + self.__class__._cp_config.update(cfg) + if hasattr(cherrypy, "session"): + del cherrypy.session + if new_cls.clean_thread: + new_cls.clean_thread.stop() + new_cls.clean_thread.unsubscribe() + del 
new_cls.clean_thread + + @cherrypy.expose + def index(self): + sess = cherrypy.session + c = sess.get('counter', 0) + 1 + time.sleep(0.01) + sess['counter'] = c + return str(c) + + @cherrypy.expose + def keyin(self, key): + return str(key in cherrypy.session) + + @cherrypy.expose + def delete(self): + cherrypy.session.delete() + sessions.expire() + return "done" + + @cherrypy.expose + def delkey(self, key): + del cherrypy.session[key] + return "OK" + + @cherrypy.expose + def redir_target(self): + return self._cp_config['tools.sessions.storage_class'].__name__ + + @cherrypy.expose + def iredir(self): + raise cherrypy.InternalRedirect('/redir_target') + + @cherrypy.expose + @cherrypy.config(**{ + 'tools.allow.on': True, + 'tools.allow.methods': ['GET'], + }) + def restricted(self): + return cherrypy.request.method + + @cherrypy.expose + def regen(self): + cherrypy.tools.sessions.regenerate() + return "logged in" + + @cherrypy.expose + def length(self): + return str(len(cherrypy.session)) + + @cherrypy.expose + @cherrypy.config(**{ + 'tools.sessions.path': '/session_cookie', + 'tools.sessions.name': 'temp', + 'tools.sessions.persistent': False, + }) + def session_cookie(self): + # Must load() to start the clean thread. + cherrypy.session.load() + return cherrypy.session.id + + cherrypy.tree.mount(Root()) + + +from cherrypy.test import helper + + +class SessionTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def tearDown(self): + # Clean up sessions. + for fname in os.listdir(localDir): + if fname.startswith(sessions.FileSession.SESSION_PREFIX): + os.unlink(os.path.join(localDir, fname)) + + def test_0_Session(self): + self.getPage('/set_session_cls/cherrypy.lib.sessions.RamSession') + self.getPage('/clear') + + # Test that a normal request gets the same id in the cookies. + # Note: this wouldn't work if /data didn't load the session. 
+ self.getPage('/data') + self.assertBody("{'aha': 'foo'}") + c = self.cookies[0] + self.getPage('/data', self.cookies) + self.assertEqual(self.cookies[0], c) + + self.getPage('/testStr') + self.assertBody('1') + cookie_parts = dict([p.strip().split('=') + for p in self.cookies[0][1].split(";")]) + # Assert there is an 'expires' param + self.assertEqual(set(cookie_parts.keys()), + set(['session_id', 'expires', 'Path'])) + self.getPage('/testGen', self.cookies) + self.assertBody('2') + self.getPage('/testStr', self.cookies) + self.assertBody('3') + self.getPage('/data', self.cookies) + self.assertBody("{'aha': 'foo', 'counter': 3}") + self.getPage('/length', self.cookies) + self.assertBody('2') + self.getPage('/delkey?key=counter', self.cookies) + self.assertStatus(200) + + self.getPage('/set_session_cls/cherrypy.lib.sessions.FileSession') + self.getPage('/testStr') + self.assertBody('1') + self.getPage('/testGen', self.cookies) + self.assertBody('2') + self.getPage('/testStr', self.cookies) + self.assertBody('3') + self.getPage('/delkey?key=counter', self.cookies) + self.assertStatus(200) + + # Wait for the session.timeout (1 second) + time.sleep(2) + self.getPage('/') + self.assertBody('1') + self.getPage('/length', self.cookies) + self.assertBody('1') + + # Test session __contains__ + self.getPage('/keyin?key=counter', self.cookies) + self.assertBody("True") + cookieset1 = self.cookies + + # Make a new session and test __len__ again + self.getPage('/') + self.getPage('/length', self.cookies) + self.assertBody('2') + + # Test session delete + self.getPage('/delete', self.cookies) + self.assertBody("done") + self.getPage('/delete', cookieset1) + self.assertBody("done") + f = lambda: [ + x for x in os.listdir(localDir) if x.startswith('session-')] + self.assertEqual(f(), []) + + # Wait for the cleanup thread to delete remaining session files + self.getPage('/') + f = lambda: [ + x for x in os.listdir(localDir) if x.startswith('session-')] + self.assertNotEqual(f(), 
[]) + time.sleep(2) + self.assertEqual(f(), []) + + def test_1_Ram_Concurrency(self): + self.getPage('/set_session_cls/cherrypy.lib.sessions.RamSession') + self._test_Concurrency() + + def test_2_File_Concurrency(self): + self.getPage('/set_session_cls/cherrypy.lib.sessions.FileSession') + self._test_Concurrency() + + def _test_Concurrency(self): + client_thread_count = 5 + request_count = 30 + + # Get initial cookie + self.getPage("/") + self.assertBody("1") + cookies = self.cookies + + data_dict = {} + errors = [] + + def request(index): + if self.scheme == 'https': + c = HTTPSConnection('%s:%s' % (self.interface(), self.PORT)) + else: + c = HTTPConnection('%s:%s' % (self.interface(), self.PORT)) + for i in range(request_count): + c.putrequest('GET', '/') + for k, v in cookies: + c.putheader(k, v) + c.endheaders() + response = c.getresponse() + body = response.read() + if response.status != 200 or not body.isdigit(): + errors.append((response.status, body)) + else: + data_dict[index] = max(data_dict[index], int(body)) + # Uncomment the following line to prove threads overlap. +## sys.stdout.write("%d " % index) + + # Start requests from each of + # concurrent clients + ts = [] + for c in range(client_thread_count): + data_dict[c] = 0 + t = threading.Thread(target=request, args=(c,)) + ts.append(t) + t.start() + + for t in ts: + t.join() + + hitcount = max(data_dict.values()) + expected = 1 + (client_thread_count * request_count) + + for e in errors: + print(e) + self.assertEqual(hitcount, expected) + + def test_3_Redirect(self): + # Start a new session + self.getPage('/testStr') + self.getPage('/iredir', self.cookies) + self.assertBody("FileSession") + + def test_4_File_deletion(self): + # Start a new session + self.getPage('/testStr') + # Delete the session file manually and retry. 
+ id = self.cookies[0][1].split(";", 1)[0].split("=", 1)[1] + path = os.path.join(localDir, "session-" + id) + os.unlink(path) + self.getPage('/testStr', self.cookies) + + def test_5_Error_paths(self): + self.getPage('/unknown/page') + self.assertErrorPage(404, "The path '/unknown/page' was not found.") + + # Note: this path is *not* the same as above. The above + # takes a normal route through the session code; this one + # skips the session code's before_handler and only calls + # before_finalize (save) and on_end (close). So the session + # code has to survive calling save/close without init. + self.getPage('/restricted', self.cookies, method='POST') + self.assertErrorPage(405, response_codes[405][1]) + + def test_6_regenerate(self): + self.getPage('/testStr') + # grab the cookie ID + id1 = self.cookies[0][1].split(";", 1)[0].split("=", 1)[1] + self.getPage('/regen') + self.assertBody('logged in') + id2 = self.cookies[0][1].split(";", 1)[0].split("=", 1)[1] + self.assertNotEqual(id1, id2) + + self.getPage('/testStr') + # grab the cookie ID + id1 = self.cookies[0][1].split(";", 1)[0].split("=", 1)[1] + self.getPage('/testStr', + headers=[ + ('Cookie', + 'session_id=maliciousid; ' + 'expires=Sat, 27 Oct 2017 04:18:28 GMT; Path=/;')]) + id2 = self.cookies[0][1].split(";", 1)[0].split("=", 1)[1] + self.assertNotEqual(id1, id2) + self.assertNotEqual(id2, 'maliciousid') + + def test_7_session_cookies(self): + self.getPage('/set_session_cls/cherrypy.lib.sessions.RamSession') + self.getPage('/clear') + self.getPage('/session_cookie') + # grab the cookie ID + cookie_parts = dict([p.strip().split('=') + for p in self.cookies[0][1].split(";")]) + # Assert there is no 'expires' param + self.assertEqual(set(cookie_parts.keys()), set(['temp', 'Path'])) + id1 = cookie_parts['temp'] + self.assertEqual(copykeys(sessions.RamSession.cache), [id1]) + + # Send another request in the same "browser session". 
+ self.getPage('/session_cookie', self.cookies) + cookie_parts = dict([p.strip().split('=') + for p in self.cookies[0][1].split(";")]) + # Assert there is no 'expires' param + self.assertEqual(set(cookie_parts.keys()), set(['temp', 'Path'])) + self.assertBody(id1) + self.assertEqual(copykeys(sessions.RamSession.cache), [id1]) + + # Simulate a browser close by just not sending the cookies + self.getPage('/session_cookie') + # grab the cookie ID + cookie_parts = dict([p.strip().split('=') + for p in self.cookies[0][1].split(";")]) + # Assert there is no 'expires' param + self.assertEqual(set(cookie_parts.keys()), set(['temp', 'Path'])) + # Assert a new id has been generated... + id2 = cookie_parts['temp'] + self.assertNotEqual(id1, id2) + self.assertEqual(set(sessions.RamSession.cache.keys()), + set([id1, id2])) + + # Wait for the session.timeout on both sessions + time.sleep(2.5) + cache = copykeys(sessions.RamSession.cache) + if cache: + if cache == [id2]: + self.fail("The second session did not time out.") + else: + self.fail("Unknown session id in cache: %r", cache) + + def test_8_Ram_Cleanup(self): + def lock(): + s1 = sessions.RamSession() + s1.acquire_lock() + time.sleep(1) + s1.release_lock() + + t = threading.Thread(target=lock) + t.start() + s2 = sessions.RamSession() + s2.clean_up() + self.assertEqual(len(sessions.RamSession.locks), 1, 'Clean up should not remove active lock') + t.join() + + +import socket +try: + import memcache # NOQA + + host, port = '127.0.0.1', 11211 + for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, + socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + s = None + try: + s = socket.socket(af, socktype, proto) + # See http://groups.google.com/group/cherrypy-users/ + # browse_frm/thread/bbfe5eb39c904fe0 + s.settimeout(1.0) + s.connect((host, port)) + s.close() + except socket.error: + if s: + s.close() + raise + break +except (ImportError, socket.error): + class MemcachedSessionTest(helper.CPWebCase): + 
setup_server = staticmethod(setup_server) + + def test(self): + return self.skip("memcached not reachable ") +else: + class MemcachedSessionTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def test_0_Session(self): + self.getPage('/set_session_cls/cherrypy.Sessions.MemcachedSession') + + self.getPage('/testStr') + self.assertBody('1') + self.getPage('/testGen', self.cookies) + self.assertBody('2') + self.getPage('/testStr', self.cookies) + self.assertBody('3') + self.getPage('/length', self.cookies) + self.assertErrorPage(500) + self.assertInBody("NotImplementedError") + self.getPage('/delkey?key=counter', self.cookies) + self.assertStatus(200) + + # Wait for the session.timeout (1 second) + time.sleep(1.25) + self.getPage('/') + self.assertBody('1') + + # Test session __contains__ + self.getPage('/keyin?key=counter', self.cookies) + self.assertBody("True") + + # Test session delete + self.getPage('/delete', self.cookies) + self.assertBody("done") + + def test_1_Concurrency(self): + client_thread_count = 5 + request_count = 30 + + # Get initial cookie + self.getPage("/") + self.assertBody("1") + cookies = self.cookies + + data_dict = {} + + def request(index): + for i in range(request_count): + self.getPage("/", cookies) + # Uncomment the following line to prove threads overlap. 
+## sys.stdout.write("%d " % index) + if not self.body.isdigit(): + self.fail(self.body) + data_dict[index] = int(self.body) + + # Start concurrent requests from + # each of clients + ts = [] + for c in range(client_thread_count): + data_dict[c] = 0 + t = threading.Thread(target=request, args=(c,)) + ts.append(t) + t.start() + + for t in ts: + t.join() + + hitcount = max(data_dict.values()) + expected = 1 + (client_thread_count * request_count) + self.assertEqual(hitcount, expected) + + def test_3_Redirect(self): + # Start a new session + self.getPage('/testStr') + self.getPage('/iredir', self.cookies) + self.assertBody("memcached") + + def test_5_Error_paths(self): + self.getPage('/unknown/page') + self.assertErrorPage( + 404, "The path '/unknown/page' was not found.") + + # Note: this path is *not* the same as above. The above + # takes a normal route through the session code; this one + # skips the session code's before_handler and only calls + # before_finalize (save) and on_end (close). So the session + # code has to survive calling save/close without init. 
+ self.getPage('/restricted', self.cookies, method='POST') + self.assertErrorPage(405, response_codes[405][1]) diff --git a/deps/cherrypy/test/test_sessionauthenticate.py b/deps/cherrypy/test/test_sessionauthenticate.py new file mode 100644 index 00000000..8988e985 --- /dev/null +++ b/deps/cherrypy/test/test_sessionauthenticate.py @@ -0,0 +1,61 @@ +import cherrypy +from cherrypy.test import helper + + +class SessionAuthenticateTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + + def check(username, password): + # Dummy check_username_and_password function + if username != 'test' or password != 'password': + return 'Wrong login/password' + + def augment_params(): + # A simple tool to add some things to request.params + # This is to check to make sure that session_auth can handle + # request params (ticket #780) + cherrypy.request.params["test"] = "test" + + cherrypy.tools.augment_params = cherrypy.Tool( + 'before_handler', augment_params, None, priority=30) + + class Test: + + _cp_config = { + 'tools.sessions.on': True, + 'tools.session_auth.on': True, + 'tools.session_auth.check_username_and_password': check, + 'tools.augment_params.on': True, + } + + @cherrypy.expose + def index(self, **kwargs): + return "Hi %s, you are logged in" % cherrypy.request.login + + cherrypy.tree.mount(Test()) + + def testSessionAuthenticate(self): + # request a page and check for login form + self.getPage('/') + self.assertInBody('
') + + # setup credentials + login_body = 'username=test&password=password&from_page=/' + + # attempt a login + self.getPage('/do_login', method='POST', body=login_body) + self.assertStatus((302, 303)) + + # get the page now that we are logged in + self.getPage('/', self.cookies) + self.assertBody('Hi test, you are logged in') + + # do a logout + self.getPage('/do_logout', self.cookies, method='POST') + self.assertStatus((302, 303)) + + # verify we are logged out + self.getPage('/', self.cookies) + self.assertInBody('') diff --git a/deps/cherrypy/test/test_states.py b/deps/cherrypy/test/test_states.py new file mode 100644 index 00000000..412b52f3 --- /dev/null +++ b/deps/cherrypy/test/test_states.py @@ -0,0 +1,527 @@ +import os +import signal +import socket +import sys +import time +import unittest +import warnings + +import cherrypy +import cherrypy.process.servers +from cherrypy._cpcompat import BadStatusLine, ntob +from cherrypy.test import helper + +engine = cherrypy.engine +thisdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) + + +class Dependency: + + def __init__(self, bus): + self.bus = bus + self.running = False + self.startcount = 0 + self.gracecount = 0 + self.threads = {} + + def subscribe(self): + self.bus.subscribe('start', self.start) + self.bus.subscribe('stop', self.stop) + self.bus.subscribe('graceful', self.graceful) + self.bus.subscribe('start_thread', self.startthread) + self.bus.subscribe('stop_thread', self.stopthread) + + def start(self): + self.running = True + self.startcount += 1 + + def stop(self): + self.running = False + + def graceful(self): + self.gracecount += 1 + + def startthread(self, thread_id): + self.threads[thread_id] = None + + def stopthread(self, thread_id): + del self.threads[thread_id] + +db_connection = Dependency(engine) + + +def setup_server(): + class Root: + + @cherrypy.expose + def index(self): + return "Hello World" + + @cherrypy.expose + def ctrlc(self): + raise KeyboardInterrupt() + + @cherrypy.expose 
+ def graceful(self): + engine.graceful() + return "app was (gracefully) restarted succesfully" + + @cherrypy.expose + def block_explicit(self): + while True: + if cherrypy.response.timed_out: + cherrypy.response.timed_out = False + return "broken!" + time.sleep(0.01) + + @cherrypy.expose + def block_implicit(self): + time.sleep(0.5) + return "response.timeout = %s" % cherrypy.response.timeout + + cherrypy.tree.mount(Root()) + cherrypy.config.update({ + 'environment': 'test_suite', + 'engine.timeout_monitor.frequency': 0.1, + }) + + db_connection.subscribe() + +# ------------ Enough helpers. Time for real live test cases. ------------ # + + +class ServerStateTests(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def setUp(self): + cherrypy.server.socket_timeout = 0.1 + self.do_gc_test = False + + def test_0_NormalStateFlow(self): + engine.stop() + # Our db_connection should not be running + self.assertEqual(db_connection.running, False) + self.assertEqual(db_connection.startcount, 1) + self.assertEqual(len(db_connection.threads), 0) + + # Test server start + engine.start() + self.assertEqual(engine.state, engine.states.STARTED) + + host = cherrypy.server.socket_host + port = cherrypy.server.socket_port + self.assertRaises(IOError, cherrypy._cpserver.check_port, host, port) + + # The db_connection should be running now + self.assertEqual(db_connection.running, True) + self.assertEqual(db_connection.startcount, 2) + self.assertEqual(len(db_connection.threads), 0) + + self.getPage("/") + self.assertBody("Hello World") + self.assertEqual(len(db_connection.threads), 1) + + # Test engine stop. This will also stop the HTTP server. + engine.stop() + self.assertEqual(engine.state, engine.states.STOPPED) + + # Verify that our custom stop function was called + self.assertEqual(db_connection.running, False) + self.assertEqual(len(db_connection.threads), 0) + + # Block the main thread now and verify that exit() works. 
+ def exittest(): + self.getPage("/") + self.assertBody("Hello World") + engine.exit() + cherrypy.server.start() + engine.start_with_callback(exittest) + engine.block() + self.assertEqual(engine.state, engine.states.EXITING) + + def test_1_Restart(self): + cherrypy.server.start() + engine.start() + + # The db_connection should be running now + self.assertEqual(db_connection.running, True) + grace = db_connection.gracecount + + self.getPage("/") + self.assertBody("Hello World") + self.assertEqual(len(db_connection.threads), 1) + + # Test server restart from this thread + engine.graceful() + self.assertEqual(engine.state, engine.states.STARTED) + self.getPage("/") + self.assertBody("Hello World") + self.assertEqual(db_connection.running, True) + self.assertEqual(db_connection.gracecount, grace + 1) + self.assertEqual(len(db_connection.threads), 1) + + # Test server restart from inside a page handler + self.getPage("/graceful") + self.assertEqual(engine.state, engine.states.STARTED) + self.assertBody("app was (gracefully) restarted succesfully") + self.assertEqual(db_connection.running, True) + self.assertEqual(db_connection.gracecount, grace + 2) + # Since we are requesting synchronously, is only one thread used? + # Note that the "/graceful" request has been flushed. + self.assertEqual(len(db_connection.threads), 0) + + engine.stop() + self.assertEqual(engine.state, engine.states.STOPPED) + self.assertEqual(db_connection.running, False) + self.assertEqual(len(db_connection.threads), 0) + + def test_2_KeyboardInterrupt(self): + # Raise a keyboard interrupt in the HTTP server's main thread. + # We must start the server in this, the main thread + engine.start() + cherrypy.server.start() + + self.persistent = True + try: + # Make the first request and assert there's no "Connection: close". 
+ self.getPage("/") + self.assertStatus('200 OK') + self.assertBody("Hello World") + self.assertNoHeader("Connection") + + cherrypy.server.httpserver.interrupt = KeyboardInterrupt + engine.block() + + self.assertEqual(db_connection.running, False) + self.assertEqual(len(db_connection.threads), 0) + self.assertEqual(engine.state, engine.states.EXITING) + finally: + self.persistent = False + + # Raise a keyboard interrupt in a page handler; on multithreaded + # servers, this should occur in one of the worker threads. + # This should raise a BadStatusLine error, since the worker + # thread will just die without writing a response. + engine.start() + cherrypy.server.start() + # From python3.5 a new exception is retuned when the connection + # ends abruptly: + # http.client.RemoteDisconnected + # RemoteDisconnected is a subclass of: + # (ConnectionResetError, http.client.BadStatusLine) + # and ConnectionResetError is an indirect subclass of: + # OSError + # From python 3.3 an up socket.error is an alias to OSError + # following PEP-3151, therefore http.client.RemoteDisconnected + # is considered a socket.error. + # + # raise_subcls specifies the classes that are not going + # to be considered as a socket.error for the retries. + # Given that RemoteDisconnected is part BadStatusLine + # we can use the same call for all py3 versions without + # sideffects. python < 3.5 will raise directly BadStatusLine + # which is not a subclass for socket.error/OSError. + try: + self.getPage("/ctrlc", raise_subcls=BadStatusLine) + except BadStatusLine: + pass + else: + print(self.body) + self.fail("AssertionError: BadStatusLine not raised") + + engine.block() + self.assertEqual(db_connection.running, False) + self.assertEqual(len(db_connection.threads), 0) + + def test_3_Deadlocks(self): + cherrypy.config.update({'response.timeout': 0.2}) + + engine.start() + cherrypy.server.start() + try: + self.assertNotEqual(engine.timeout_monitor.thread, None) + + # Request a "normal" page. 
+ self.assertEqual(engine.timeout_monitor.servings, []) + self.getPage("/") + self.assertBody("Hello World") + # request.close is called async. + while engine.timeout_monitor.servings: + sys.stdout.write(".") + time.sleep(0.01) + + # Request a page that explicitly checks itself for deadlock. + # The deadlock_timeout should be 2 secs. + self.getPage("/block_explicit") + self.assertBody("broken!") + + # Request a page that implicitly breaks deadlock. + # If we deadlock, we want to touch as little code as possible, + # so we won't even call handle_error, just bail ASAP. + self.getPage("/block_implicit") + self.assertStatus(500) + self.assertInBody("raise cherrypy.TimeoutError()") + finally: + engine.exit() + + def test_4_Autoreload(self): + # If test_3 has not been executed, the server won't be stopped, + # so we'll have to do it. + if engine.state != engine.states.EXITING: + engine.exit() + + # Start the demo script in a new process + p = helper.CPProcess(ssl=(self.scheme.lower() == 'https')) + p.write_conf(extra='test_case_name: "test_4_Autoreload"') + p.start(imports='cherrypy.test._test_states_demo') + try: + self.getPage("/start") + start = float(self.body) + + # Give the autoreloader time to cache the file time. + time.sleep(2) + + # Touch the file + os.utime(os.path.join(thisdir, "_test_states_demo.py"), None) + + # Give the autoreloader time to re-exec the process + time.sleep(2) + host = cherrypy.server.socket_host + port = cherrypy.server.socket_port + cherrypy._cpserver.wait_for_occupied_port(host, port) + + self.getPage("/start") + if not (float(self.body) > start): + raise AssertionError("start time %s not greater than %s" % + (float(self.body), start)) + finally: + # Shut down the spawned process + self.getPage("/exit") + p.join() + + def test_5_Start_Error(self): + # If test_3 has not been executed, the server won't be stopped, + # so we'll have to do it. 
+ if engine.state != engine.states.EXITING: + engine.exit() + + # If a process errors during start, it should stop the engine + # and exit with a non-zero exit code. + p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'), + wait=True) + p.write_conf( + extra="""starterror: True +test_case_name: "test_5_Start_Error" +""" + ) + p.start(imports='cherrypy.test._test_states_demo') + if p.exit_code == 0: + self.fail("Process failed to return nonzero exit code.") + + +class PluginTests(helper.CPWebCase): + + def test_daemonize(self): + if os.name not in ['posix']: + return self.skip("skipped (not on posix) ") + self.HOST = '127.0.0.1' + self.PORT = 8081 + # Spawn the process and wait, when this returns, the original process + # is finished. If it daemonized properly, we should still be able + # to access pages. + p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'), + wait=True, daemonize=True, + socket_host='127.0.0.1', + socket_port=8081) + p.write_conf( + extra='test_case_name: "test_daemonize"') + p.start(imports='cherrypy.test._test_states_demo') + try: + # Just get the pid of the daemonization process. + self.getPage("/pid") + self.assertStatus(200) + page_pid = int(self.body) + self.assertEqual(page_pid, p.get_pid()) + finally: + # Shut down the spawned process + self.getPage("/exit") + p.join() + + # Wait until here to test the exit code because we want to ensure + # that we wait for the daemon to finish running before we fail. + if p.exit_code != 0: + self.fail("Daemonized parent process failed to exit cleanly.") + + +class SignalHandlingTests(helper.CPWebCase): + + def test_SIGHUP_tty(self): + # When not daemonized, SIGHUP should shut down the server. + try: + from signal import SIGHUP + except ImportError: + return self.skip("skipped (no SIGHUP) ") + + # Spawn the process. 
+ p = helper.CPProcess(ssl=(self.scheme.lower() == 'https')) + p.write_conf( + extra='test_case_name: "test_SIGHUP_tty"') + p.start(imports='cherrypy.test._test_states_demo') + # Send a SIGHUP + os.kill(p.get_pid(), SIGHUP) + # This might hang if things aren't working right, but meh. + p.join() + + def test_SIGHUP_daemonized(self): + # When daemonized, SIGHUP should restart the server. + try: + from signal import SIGHUP + except ImportError: + return self.skip("skipped (no SIGHUP) ") + + if os.name not in ['posix']: + return self.skip("skipped (not on posix) ") + + # Spawn the process and wait, when this returns, the original process + # is finished. If it daemonized properly, we should still be able + # to access pages. + p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'), + wait=True, daemonize=True) + p.write_conf( + extra='test_case_name: "test_SIGHUP_daemonized"') + p.start(imports='cherrypy.test._test_states_demo') + + pid = p.get_pid() + try: + # Send a SIGHUP + os.kill(pid, SIGHUP) + # Give the server some time to restart + time.sleep(2) + self.getPage("/pid") + self.assertStatus(200) + new_pid = int(self.body) + self.assertNotEqual(new_pid, pid) + finally: + # Shut down the spawned process + self.getPage("/exit") + p.join() + + def _require_signal_and_kill(self, signal_name): + if not hasattr(signal, signal_name): + self.skip("skipped (no %(signal_name)s)" % vars()) + + if not hasattr(os, 'kill'): + self.skip("skipped (no os.kill)") + + def test_SIGTERM(self): + "SIGTERM should shut down the server whether daemonized or not." + self._require_signal_and_kill('SIGTERM') + + # Spawn a normal, undaemonized process. + p = helper.CPProcess(ssl=(self.scheme.lower() == 'https')) + p.write_conf( + extra='test_case_name: "test_SIGTERM"') + p.start(imports='cherrypy.test._test_states_demo') + # Send a SIGTERM + os.kill(p.get_pid(), signal.SIGTERM) + # This might hang if things aren't working right, but meh. 
+ p.join() + + if os.name in ['posix']: + # Spawn a daemonized process and test again. + p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'), + wait=True, daemonize=True) + p.write_conf( + extra='test_case_name: "test_SIGTERM_2"') + p.start(imports='cherrypy.test._test_states_demo') + # Send a SIGTERM + os.kill(p.get_pid(), signal.SIGTERM) + # This might hang if things aren't working right, but meh. + p.join() + + def test_signal_handler_unsubscribe(self): + self._require_signal_and_kill('SIGTERM') + + # Although Windows has `os.kill` and SIGTERM is defined, the + # platform does not implement signals and sending SIGTERM + # will result in a forced termination of the process. + # Therefore, this test is not suitable for Windows. + if os.name == 'nt': + self.skip("SIGTERM not available") + + # Spawn a normal, undaemonized process. + p = helper.CPProcess(ssl=(self.scheme.lower() == 'https')) + p.write_conf( + extra="""unsubsig: True +test_case_name: "test_signal_handler_unsubscribe" +""") + p.start(imports='cherrypy.test._test_states_demo') + # Ask the process to quit + os.kill(p.get_pid(), signal.SIGTERM) + # This might hang if things aren't working right, but meh. + p.join() + + # Assert the old handler ran. + target_line = open(p.error_log, 'rb').readlines()[-10] + if not ntob("I am an old SIGTERM handler.") in target_line: + self.fail("Old SIGTERM handler did not run.\n%r" % target_line) + + +class WaitTests(unittest.TestCase): + + def test_wait_for_occupied_port_INADDR_ANY(self): + """ + Wait on INADDR_ANY should not raise IOError + + In cases where the loopback interface does not exist, CherryPy cannot + effectively determine if a port binding to INADDR_ANY was effected. + In this situation, CherryPy should assume that it failed to detect + the binding (not that the binding failed) and only warn that it could + not verify it. 
+ """ + # At such a time that CherryPy can reliably determine one or more + # viable IP addresses of the host, this test may be removed. + + # Simulate the behavior we observe when no loopback interface is + # present by: finding a port that's not occupied, then wait on it. + + free_port = self.find_free_port() + + servers = cherrypy.process.servers + + def with_shorter_timeouts(func): + """ + A context where occupied_port_timeout is much smaller to speed + test runs. + """ + # When we have Python 2.5, simplify using the with_statement. + orig_timeout = servers.occupied_port_timeout + servers.occupied_port_timeout = .07 + try: + func() + finally: + servers.occupied_port_timeout = orig_timeout + + def do_waiting(): + # Wait on the free port that's unbound + with warnings.catch_warnings(record=True) as w: + servers.wait_for_occupied_port('0.0.0.0', free_port) + self.assertEqual(len(w), 1) + self.assertTrue(isinstance(w[0], warnings.WarningMessage)) + self.assertTrue( + 'Unable to verify that the server is bound on ' in str(w[0])) + + # The wait should still raise an IO error if INADDR_ANY was + # not supplied. + self.assertRaises(IOError, servers.wait_for_occupied_port, + '127.0.0.1', free_port) + + with_shorter_timeouts(do_waiting) + + def find_free_port(self): + "Find a free port by binding to port 0 then unbinding." 
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.bind(('', 0)) + free_port = sock.getsockname()[1] + sock.close() + return free_port diff --git a/deps/cherrypy/test/test_static.py b/deps/cherrypy/test/test_static.py new file mode 100644 index 00000000..f98d17c2 --- /dev/null +++ b/deps/cherrypy/test/test_static.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +import os +import sys +import io +import contextlib + +from six.moves import urllib + +from cherrypy._cpcompat import ( + HTTPConnection, HTTPSConnection, ntou, tonative, +) + +curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) +has_space_filepath = os.path.join(curdir, 'static', 'has space.html') +bigfile_filepath = os.path.join(curdir, "static", "bigfile.log") + +# The file size needs to be big enough such that half the size of it +# won't be socket-buffered (or server-buffered) all in one go. See +# test_file_stream. +MB = 2 ** 20 +BIGFILE_SIZE = 32 * MB + +import cherrypy +from cherrypy.lib import static +from cherrypy.test import helper + + +class StaticTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + if not os.path.exists(has_space_filepath): + with open(has_space_filepath, 'wb') as f: + f.write(b'Hello, world\r\n') + needs_bigfile = ( + not os.path.exists(bigfile_filepath) + or os.path.getsize(bigfile_filepath) != BIGFILE_SIZE + ) + if needs_bigfile: + with open(bigfile_filepath, 'wb') as f: + f.write(b"x" * BIGFILE_SIZE) + + class Root: + + @cherrypy.expose + @cherrypy.config(**{'response.stream': True}) + def bigfile(self): + from cherrypy.lib import static + self.f = static.serve_file(bigfile_filepath) + return self.f + + @cherrypy.expose + def tell(self): + if self.f.input.closed: + return '' + return repr(self.f.input.tell()).rstrip('L') + + @cherrypy.expose + def fileobj(self): + f = open(os.path.join(curdir, 'style.css'), 'rb') + return static.serve_fileobj(f, content_type='text/css') + + @cherrypy.expose + def bytesio(self): + f = 
io.BytesIO(b'Fee\nfie\nfo\nfum') + return static.serve_fileobj(f, content_type='text/plain') + + class Static: + + @cherrypy.expose + def index(self): + return 'You want the Baron? You can have the Baron!' + + @cherrypy.expose + def dynamic(self): + return "This is a DYNAMIC page" + + root = Root() + root.static = Static() + + rootconf = { + '/static': { + 'tools.staticdir.on': True, + 'tools.staticdir.dir': 'static', + 'tools.staticdir.root': curdir, + }, + '/style.css': { + 'tools.staticfile.on': True, + 'tools.staticfile.filename': os.path.join(curdir, 'style.css'), + }, + '/docroot': { + 'tools.staticdir.on': True, + 'tools.staticdir.root': curdir, + 'tools.staticdir.dir': 'static', + 'tools.staticdir.index': 'index.html', + }, + '/error': { + 'tools.staticdir.on': True, + 'request.show_tracebacks': True, + }, + '/404test': { + 'tools.staticdir.on': True, + 'tools.staticdir.root': curdir, + 'tools.staticdir.dir': 'static', + 'error_page.404': error_page_404, + } + } + rootApp = cherrypy.Application(root) + rootApp.merge(rootconf) + + test_app_conf = { + '/test': { + 'tools.staticdir.index': 'index.html', + 'tools.staticdir.on': True, + 'tools.staticdir.root': curdir, + 'tools.staticdir.dir': 'static', + }, + } + testApp = cherrypy.Application(Static()) + testApp.merge(test_app_conf) + + vhost = cherrypy._cpwsgi.VirtualHost(rootApp, {'virt.net': testApp}) + cherrypy.tree.graft(vhost) + + @staticmethod + def teardown_server(): + for f in (has_space_filepath, bigfile_filepath): + if os.path.exists(f): + try: + os.unlink(f) + except: + pass + + def test_static(self): + self.getPage("/static/index.html") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html') + self.assertBody('Hello, world\r\n') + + # Using a staticdir.root value in a subdir... 
+ self.getPage("/docroot/index.html") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html') + self.assertBody('Hello, world\r\n') + + # Check a filename with spaces in it + self.getPage("/static/has%20space.html") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html') + self.assertBody('Hello, world\r\n') + + self.getPage("/style.css") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/css') + # Note: The body should be exactly 'Dummy stylesheet\n', but + # unfortunately some tools such as WinZip sometimes turn \n + # into \r\n on Windows when extracting the CherryPy tarball so + # we just check the content + self.assertMatchesBody('^Dummy stylesheet') + + def test_fallthrough(self): + # Test that NotFound will then try dynamic handlers (see [878]). + self.getPage("/static/dynamic") + self.assertBody("This is a DYNAMIC page") + + # Check a directory via fall-through to dynamic handler. + self.getPage("/static/") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html;charset=utf-8') + self.assertBody('You want the Baron? You can have the Baron!') + + def test_index(self): + # Check a directory via "staticdir.index". + self.getPage("/docroot/") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/html') + self.assertBody('Hello, world\r\n') + # The same page should be returned even if redirected. + self.getPage("/docroot") + self.assertStatus(301) + self.assertHeader('Location', '%s/docroot/' % self.base()) + self.assertMatchesBody("This resource .* " + "%s/docroot/." 
% (self.base(), self.base())) + + def test_config_errors(self): + # Check that we get an error if no .file or .dir + self.getPage("/error/thing.html") + self.assertErrorPage(500) + if sys.version_info >= (3, 3): + errmsg = ( + "TypeError: staticdir\(\) missing 2 " + "required positional arguments" + ) + else: + errmsg = ( + "TypeError: staticdir\(\) takes at least 2 " + "(positional )?arguments \(0 given\)" + ) + self.assertMatchesBody(errmsg.encode('ascii')) + + def test_security(self): + # Test up-level security + self.getPage("/static/../../test/style.css") + self.assertStatus((400, 403)) + + def test_modif(self): + # Test modified-since on a reasonably-large file + self.getPage("/static/dirback.jpg") + self.assertStatus("200 OK") + lastmod = "" + for k, v in self.headers: + if k == 'Last-Modified': + lastmod = v + ims = ("If-Modified-Since", lastmod) + self.getPage("/static/dirback.jpg", headers=[ims]) + self.assertStatus(304) + self.assertNoHeader("Content-Type") + self.assertNoHeader("Content-Length") + self.assertNoHeader("Content-Disposition") + self.assertBody("") + + def test_755_vhost(self): + self.getPage("/test/", [('Host', 'virt.net')]) + self.assertStatus(200) + self.getPage("/test", [('Host', 'virt.net')]) + self.assertStatus(301) + self.assertHeader('Location', self.scheme + '://virt.net/test/') + + def test_serve_fileobj(self): + self.getPage("/fileobj") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/css;charset=utf-8') + self.assertMatchesBody('^Dummy stylesheet') + + def test_serve_bytesio(self): + self.getPage("/bytesio") + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/plain;charset=utf-8') + self.assertHeader('Content-Length', 14) + self.assertMatchesBody('Fee\nfie\nfo\nfum') + + def test_file_stream(self): + if cherrypy.server.protocol_version != "HTTP/1.1": + return self.skip() + + self.PROTOCOL = "HTTP/1.1" + + # Make an initial request + self.persistent = True + conn = self.HTTP_CONN + 
conn.putrequest("GET", "/bigfile", skip_host=True) + conn.putheader("Host", self.HOST) + conn.endheaders() + response = conn.response_class(conn.sock, method="GET") + response.begin() + self.assertEqual(response.status, 200) + + body = b'' + remaining = BIGFILE_SIZE + while remaining > 0: + data = response.fp.read(65536) + if not data: + break + body += data + remaining -= len(data) + + if self.scheme == "https": + newconn = HTTPSConnection + else: + newconn = HTTPConnection + s, h, b = helper.webtest.openURL( + b"/tell", headers=[], host=self.HOST, port=self.PORT, + http_conn=newconn) + if not b: + # The file was closed on the server. + tell_position = BIGFILE_SIZE + else: + tell_position = int(b) + + read_so_far = len(body) + + # It is difficult for us to force the server to only read + # the bytes that we ask for - there are going to be buffers + # inbetween. + # + # CherryPy will attempt to write as much data as it can to + # the socket, and we don't have a way to determine what that + # size will be. So we make the following assumption - by + # the time we have read in the entire file on the server, + # we will have at least received half of it. If this is not + # the case, then this is an indicator that either: + # - machines that are running this test are using buffer + # sizes greater than half of BIGFILE_SIZE; or + # - streaming is broken. + # + # At the time of writing, we seem to have encountered + # buffer sizes bigger than 512K, so we've increased + # BIGFILE_SIZE to 4MB and in 2016 to 20MB and then 32MB. + # This test is going to keep failing according to the + # improvements in hardware and OS buffers. + if tell_position >= BIGFILE_SIZE: + if read_so_far < (BIGFILE_SIZE / 2): + self.fail( + "The file should have advanced to position %r, but " + "has already advanced to the end of the file. 
It " + "may not be streamed as intended, or at the wrong " + "chunk size (64k)" % read_so_far) + elif tell_position < read_so_far: + self.fail( + "The file should have advanced to position %r, but has " + "only advanced to position %r. It may not be streamed " + "as intended, or at the wrong chunk size (64k)" % + (read_so_far, tell_position)) + + if body != b"x" * BIGFILE_SIZE: + self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." % + (BIGFILE_SIZE, body[:50], len(body))) + conn.close() + + def test_file_stream_deadlock(self): + if cherrypy.server.protocol_version != "HTTP/1.1": + return self.skip() + + self.PROTOCOL = "HTTP/1.1" + + # Make an initial request but abort early. + self.persistent = True + conn = self.HTTP_CONN + conn.putrequest("GET", "/bigfile", skip_host=True) + conn.putheader("Host", self.HOST) + conn.endheaders() + response = conn.response_class(conn.sock, method="GET") + response.begin() + self.assertEqual(response.status, 200) + body = response.fp.read(65536) + if body != b"x" * len(body): + self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." % + (65536, body[:50], len(body))) + response.close() + conn.close() + + # Make a second request, which should fetch the whole file. + self.persistent = False + self.getPage("/bigfile") + if self.body != b"x" * BIGFILE_SIZE: + self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." 
% + (BIGFILE_SIZE, self.body[:50], len(body))) + + def test_error_page_with_serve_file(self): + self.getPage("/404test/yunyeen") + self.assertStatus(404) + self.assertInBody("I couldn't find that thing") + + def test_null_bytes(self): + self.getPage("/static/\x00") + self.assertStatus('404 Not Found') + + @staticmethod + @contextlib.contextmanager + def unicode_file(): + filename = ntou("Слава Україні.html", 'utf-8') + filepath = os.path.join(curdir, "static", filename) + with io.open(filepath, 'w', encoding='utf-8') as strm: + strm.write(ntou("Героям Слава!", 'utf-8')) + try: + yield + finally: + os.remove(filepath) + + def test_unicode(self): + with self.unicode_file(): + url = ntou("/static/Слава Україні.html", 'utf-8') + # quote function requires str + url = tonative(url, 'utf-8') + url = urllib.parse.quote(url) + self.getPage(url) + + expected = ntou("Героям Слава!", 'utf-8') + self.assertInBody(expected) + + +def error_page_404(status, message, traceback, version): + path = os.path.join(curdir, 'static', '404.html') + return static.serve_file(path, content_type='text/html') diff --git a/deps/cherrypy/test/test_tools.py b/deps/cherrypy/test/test_tools.py new file mode 100644 index 00000000..489a48b3 --- /dev/null +++ b/deps/cherrypy/test/test_tools.py @@ -0,0 +1,445 @@ +"""Test the various means of instantiating and invoking tools.""" + +import gzip +import sys +import unittest +import io + +from cherrypy._cpcompat import copyitems, itervalues +from cherrypy._cpcompat import IncompleteRead, ntob, ntou, xrange +import time +timeout = 0.2 +import types + +import six + +import cherrypy +from cherrypy import tools + + +europoundUnicode = ntou('\x80\xa3') + + +# Client-side code # + +from cherrypy.test import helper + + +class ToolTests(helper.CPWebCase): + + @staticmethod + def setup_server(): + + # Put check_access in a custom toolbox with its own namespace + myauthtools = cherrypy._cptools.Toolbox("myauth") + + def check_access(default=False): + if not 
getattr(cherrypy.request, "userid", default): + raise cherrypy.HTTPError(401) + myauthtools.check_access = cherrypy.Tool( + 'before_request_body', check_access) + + def numerify(): + def number_it(body): + for chunk in body: + for k, v in cherrypy.request.numerify_map: + chunk = chunk.replace(k, v) + yield chunk + cherrypy.response.body = number_it(cherrypy.response.body) + + class NumTool(cherrypy.Tool): + + def _setup(self): + def makemap(): + m = self._merged_args().get("map", {}) + cherrypy.request.numerify_map = copyitems(m) + cherrypy.request.hooks.attach('on_start_resource', makemap) + + def critical(): + cherrypy.request.error_response = cherrypy.HTTPError( + 502).set_response + critical.failsafe = True + + cherrypy.request.hooks.attach('on_start_resource', critical) + cherrypy.request.hooks.attach(self._point, self.callable) + + tools.numerify = NumTool('before_finalize', numerify) + + # It's not mandatory to inherit from cherrypy.Tool. + class NadsatTool: + + def __init__(self): + self.ended = {} + self._name = "nadsat" + + def nadsat(self): + def nadsat_it_up(body): + for chunk in body: + chunk = chunk.replace(ntob("good"), ntob("horrorshow")) + chunk = chunk.replace(ntob("piece"), ntob("lomtick")) + yield chunk + cherrypy.response.body = nadsat_it_up(cherrypy.response.body) + nadsat.priority = 0 + + def cleanup(self): + # This runs after the request has been completely written out. + cherrypy.response.body = [ntob("razdrez")] + id = cherrypy.request.params.get("id") + if id: + self.ended[id] = True + cleanup.failsafe = True + + def _setup(self): + cherrypy.request.hooks.attach('before_finalize', self.nadsat) + cherrypy.request.hooks.attach('on_end_request', self.cleanup) + tools.nadsat = NadsatTool() + + def pipe_body(): + cherrypy.request.process_request_body = False + clen = int(cherrypy.request.headers['Content-Length']) + cherrypy.request.body = cherrypy.request.rfile.read(clen) + + # Assert that we can use a callable object instead of a function. 
+ class Rotator(object): + + def __call__(self, scale): + r = cherrypy.response + r.collapse_body() + if six.PY3: + r.body = [bytes([(x + scale) % 256 for x in r.body[0]])] + else: + r.body = [chr((ord(x) + scale) % 256) for x in r.body[0]] + cherrypy.tools.rotator = cherrypy.Tool('before_finalize', Rotator()) + + def stream_handler(next_handler, *args, **kwargs): + assert cherrypy.request.config.get('tools.streamer.arg') == 'arg value' + cherrypy.response.output = o = io.BytesIO() + try: + response = next_handler(*args, **kwargs) + # Ignore the response and return our accumulated output + # instead. + return o.getvalue() + finally: + o.close() + cherrypy.tools.streamer = cherrypy._cptools.HandlerWrapperTool( + stream_handler) + + class Root: + + @cherrypy.expose + def index(self): + return "Howdy earth!" + + @cherrypy.expose + @cherrypy.config(**{'tools.streamer.on': True, 'tools.streamer.arg': 'arg value'}) + def tarfile(self): + assert cherrypy.request.config.get('tools.streamer.arg') == 'arg value' + cherrypy.response.output.write(ntob('I am ')) + cherrypy.response.output.write(ntob('a tarfile')) + + @cherrypy.expose + def euro(self): + hooks = list(cherrypy.request.hooks['before_finalize']) + hooks.sort() + cbnames = [x.callback.__name__ for x in hooks] + assert cbnames == ['gzip'], cbnames + priorities = [x.priority for x in hooks] + assert priorities == [80], priorities + yield ntou("Hello,") + yield ntou("world") + yield europoundUnicode + + # Bare hooks + @cherrypy.expose + @cherrypy.config(**{'hooks.before_request_body': pipe_body}) + def pipe(self): + return cherrypy.request.body + + # Multiple decorators; include kwargs just for fun. + # Note that rotator must run before gzip. 
+ @cherrypy.expose + def decorated_euro(self, *vpath): + yield ntou("Hello,") + yield ntou("world") + yield europoundUnicode + decorated_euro = tools.gzip(compress_level=6)(decorated_euro) + decorated_euro = tools.rotator(scale=3)(decorated_euro) + + root = Root() + + class TestType(type): + """Metaclass which automatically exposes all functions in each + subclass, and adds an instance of the subclass as an attribute + of root. + """ + def __init__(cls, name, bases, dct): + type.__init__(cls, name, bases, dct) + for value in itervalues(dct): + if isinstance(value, types.FunctionType): + cherrypy.expose(value) + setattr(root, name.lower(), cls()) + Test = TestType('Test', (object,), {}) + + # METHOD ONE: + # Declare Tools in _cp_config + @cherrypy.config(**{"tools.nadsat.on": True}) + class Demo(Test): + + def index(self, id=None): + return "A good piece of cherry pie" + + def ended(self, id): + return repr(tools.nadsat.ended[id]) + + def err(self, id=None): + raise ValueError() + + def errinstream(self, id=None): + yield "nonconfidential" + raise ValueError() + yield "confidential" + + # METHOD TWO: decorator using Tool() + # We support Python 2.3, but the @-deco syntax would look like + # this: + # @tools.check_access() + def restricted(self): + return "Welcome!" + restricted = myauthtools.check_access()(restricted) + userid = restricted + + def err_in_onstart(self): + return "success!" 
+ + @cherrypy.config(**{'response.stream': True}) + def stream(self, id=None): + for x in xrange(100000000): + yield str(x) + + conf = { + # METHOD THREE: + # Declare Tools in detached config + '/demo': { + 'tools.numerify.on': True, + 'tools.numerify.map': {ntob("pie"): ntob("3.14159")}, + }, + '/demo/restricted': { + 'request.show_tracebacks': False, + }, + '/demo/userid': { + 'request.show_tracebacks': False, + 'myauth.check_access.default': True, + }, + '/demo/errinstream': { + 'response.stream': True, + }, + '/demo/err_in_onstart': { + # Because this isn't a dict, on_start_resource will error. + 'tools.numerify.map': "pie->3.14159" + }, + # Combined tools + '/euro': { + 'tools.gzip.on': True, + 'tools.encode.on': True, + }, + # Priority specified in config + '/decorated_euro/subpath': { + 'tools.gzip.priority': 10, + }, + # Handler wrappers + '/tarfile': {'tools.streamer.on': True} + } + app = cherrypy.tree.mount(root, config=conf) + app.request_class.namespaces['myauth'] = myauthtools + + if sys.version_info >= (2, 5): + from cherrypy.test import _test_decorators + root.tooldecs = _test_decorators.ToolExamples() + + def testHookErrors(self): + self.getPage("/demo/?id=1") + # If body is "razdrez", then on_end_request is being called too early. + self.assertBody("A horrorshow lomtick of cherry 3.14159") + # If this fails, then on_end_request isn't being called at all. + time.sleep(0.1) + self.getPage("/demo/ended/1") + self.assertBody("True") + + valerr = '\n raise ValueError()\nValueError' + self.getPage("/demo/err?id=3") + # If body is "razdrez", then on_end_request is being called too early. + self.assertErrorPage(502, pattern=valerr) + # If this fails, then on_end_request isn't being called at all. + time.sleep(0.1) + self.getPage("/demo/ended/3") + self.assertBody("True") + + # If body is "razdrez", then on_end_request is being called too early. 
+ if (cherrypy.server.protocol_version == "HTTP/1.0" or + getattr(cherrypy.server, "using_apache", False)): + self.getPage("/demo/errinstream?id=5") + # Because this error is raised after the response body has + # started, the status should not change to an error status. + self.assertStatus("200 OK") + self.assertBody("nonconfidential") + else: + # Because this error is raised after the response body has + # started, and because it's chunked output, an error is raised by + # the HTTP client when it encounters incomplete output. + self.assertRaises((ValueError, IncompleteRead), self.getPage, + "/demo/errinstream?id=5") + # If this fails, then on_end_request isn't being called at all. + time.sleep(0.1) + self.getPage("/demo/ended/5") + self.assertBody("True") + + # Test the "__call__" technique (compile-time decorator). + self.getPage("/demo/restricted") + self.assertErrorPage(401) + + # Test compile-time decorator with kwargs from config. + self.getPage("/demo/userid") + self.assertBody("Welcome!") + + def testEndRequestOnDrop(self): + old_timeout = None + try: + httpserver = cherrypy.server.httpserver + old_timeout = httpserver.timeout + except (AttributeError, IndexError): + return self.skip() + + try: + httpserver.timeout = timeout + + # Test that on_end_request is called even if the client drops. + self.persistent = True + try: + conn = self.HTTP_CONN + conn.putrequest("GET", "/demo/stream?id=9", skip_host=True) + conn.putheader("Host", self.HOST) + conn.endheaders() + # Skip the rest of the request and close the conn. This will + # cause the server's active socket to error, which *should* + # result in the request being aborted, and request.close being + # called all the way up the stack (including WSGI middleware), + # eventually calling our on_end_request hook. + finally: + self.persistent = False + time.sleep(timeout * 2) + # Test that the on_end_request hook was called. 
+ self.getPage("/demo/ended/9") + self.assertBody("True") + finally: + if old_timeout is not None: + httpserver.timeout = old_timeout + + def testGuaranteedHooks(self): + # The 'critical' on_start_resource hook is 'failsafe' (guaranteed + # to run even if there are failures in other on_start methods). + # This is NOT true of the other hooks. + # Here, we have set up a failure in NumerifyTool.numerify_map, + # but our 'critical' hook should run and set the error to 502. + self.getPage("/demo/err_in_onstart") + self.assertErrorPage(502) + self.assertInBody( + "AttributeError: 'str' object has no attribute 'items'") + + def testCombinedTools(self): + expectedResult = (ntou("Hello,world") + + europoundUnicode).encode('utf-8') + zbuf = io.BytesIO() + zfile = gzip.GzipFile(mode='wb', fileobj=zbuf, compresslevel=9) + zfile.write(expectedResult) + zfile.close() + + self.getPage("/euro", + headers=[ + ("Accept-Encoding", "gzip"), + ("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7")]) + self.assertInBody(zbuf.getvalue()[:3]) + + zbuf = io.BytesIO() + zfile = gzip.GzipFile(mode='wb', fileobj=zbuf, compresslevel=6) + zfile.write(expectedResult) + zfile.close() + + self.getPage("/decorated_euro", headers=[("Accept-Encoding", "gzip")]) + self.assertInBody(zbuf.getvalue()[:3]) + + # This returns a different value because gzip's priority was + # lowered in conf, allowing the rotator to run after gzip. + # Of course, we don't want breakage in production apps, + # but it proves the priority was changed. 
+ self.getPage("/decorated_euro/subpath", + headers=[("Accept-Encoding", "gzip")]) + if six.PY3: + self.assertInBody(bytes([(x + 3) % 256 for x in zbuf.getvalue()])) + else: + self.assertInBody(''.join([chr((ord(x) + 3) % 256) + for x in zbuf.getvalue()])) + + def testBareHooks(self): + content = "bit of a pain in me gulliver" + self.getPage("/pipe", + headers=[("Content-Length", str(len(content))), + ("Content-Type", "text/plain")], + method="POST", body=content) + self.assertBody(content) + + def testHandlerWrapperTool(self): + self.getPage("/tarfile") + self.assertBody("I am a tarfile") + + def testToolWithConfig(self): + if not sys.version_info >= (2, 5): + return self.skip("skipped (Python 2.5+ only)") + + self.getPage('/tooldecs/blah') + self.assertHeader('Content-Type', 'application/data') + + def testWarnToolOn(self): + # get + try: + cherrypy.tools.numerify.on + except AttributeError: + pass + else: + raise AssertionError("Tool.on did not error as it should have.") + + # set + try: + cherrypy.tools.numerify.on = True + except AttributeError: + pass + else: + raise AssertionError("Tool.on did not error as it should have.") + + def testDecorator(self): + @cherrypy.tools.register('on_start_resource') + def example(): + pass + self.assertTrue(isinstance(cherrypy.tools.example, cherrypy.Tool)) + self.assertEqual(cherrypy.tools.example._point, 'on_start_resource') + + @cherrypy.tools.register('before_finalize', name='renamed', priority=60) + def example(): + pass + self.assertTrue(isinstance(cherrypy.tools.renamed, cherrypy.Tool)) + self.assertEqual(cherrypy.tools.renamed._point, 'before_finalize') + self.assertEqual(cherrypy.tools.renamed._name, 'renamed') + self.assertEqual(cherrypy.tools.renamed._priority, 60) + + +class SessionAuthTest(unittest.TestCase): + + def test_login_screen_returns_bytes(self): + """ + login_screen must return bytes even if unicode parameters are passed. 
+ Issue 1132 revealed that login_screen would return unicode if the + username and password were unicode. + """ + sa = cherrypy.lib.cptools.SessionAuth() + res = sa.login_screen(None, username=six.text_type('nobody'), + password=six.text_type('anypass')) + self.assertTrue(isinstance(res, bytes)) diff --git a/deps/cherrypy/test/test_tutorials.py b/deps/cherrypy/test/test_tutorials.py new file mode 100644 index 00000000..bfcd50f7 --- /dev/null +++ b/deps/cherrypy/test/test_tutorials.py @@ -0,0 +1,217 @@ +import sys +import imp +import types + +try: + import importlib +except Exception: + # Python 2.6 may not have it. + pass + +import six + +import cherrypy +from cherrypy.test import helper + + +class TutorialTest(helper.CPWebCase): + + @classmethod + def setup_server(cls): + """ + Mount something so the engine starts. + """ + class Dummy: + pass + cherrypy.tree.mount(Dummy()) + + @staticmethod + def load_module(name): + """ + Import or reload tutorial module as needed. + """ + target = "cherrypy.tutorial." 
+ name + if target in sys.modules: + module = imp.reload(sys.modules[target]) + elif 'importlib' not in globals(): + module = __import__(target, globals(), locals(), ['']) + else: + module = importlib.import_module(target) + return module + + @classmethod + def setup_tutorial(cls, name, root_name, config={}): + cherrypy.config.reset() + module = cls.load_module(name) + root = getattr(module, root_name) + conf = getattr(module, 'tutconf') + class_types = type, + if six.PY2: + class_types += types.ClassType, + if isinstance(root, class_types): + root = root() + cherrypy.tree.mount(root, config=conf) + cherrypy.config.update(config) + + def test01HelloWorld(self): + self.setup_tutorial('tut01_helloworld', 'HelloWorld') + self.getPage("/") + self.assertBody('Hello world!') + + def test02ExposeMethods(self): + self.setup_tutorial('tut02_expose_methods', 'HelloWorld') + self.getPage("/show_msg") + self.assertBody('Hello world!') + + def test03GetAndPost(self): + self.setup_tutorial('tut03_get_and_post', 'WelcomePage') + + # Try different GET queries + self.getPage("/greetUser?name=Bob") + self.assertBody("Hey Bob, what's up?") + + self.getPage("/greetUser") + self.assertBody('Please enter your name here.') + + self.getPage("/greetUser?name=") + self.assertBody('No, really, enter your name here.') + + # Try the same with POST + self.getPage("/greetUser", method="POST", body="name=Bob") + self.assertBody("Hey Bob, what's up?") + + self.getPage("/greetUser", method="POST", body="name=") + self.assertBody('No, really, enter your name here.') + + def test04ComplexSite(self): + self.setup_tutorial('tut04_complex_site', 'root') + + msg = ''' +

Here are some extra useful links:

+ + + +

[Return to links page]

''' + self.getPage("/links/extra/") + self.assertBody(msg) + + def test05DerivedObjects(self): + self.setup_tutorial('tut05_derived_objects', 'HomePage') + msg = ''' + + + Another Page + + +

Another Page

+ +

+ And this is the amazing second page! +

+ + + + ''' + # the tutorial has some annoying spaces in otherwise blank lines + msg = msg.replace('\n\n', '\n \n') + msg = msg.replace('

\n\n', '

\n \n') + self.getPage("/another/") + self.assertBody(msg) + + def test06DefaultMethod(self): + self.setup_tutorial("tut06_default_method", "UsersPage") + self.getPage('/hendrik') + self.assertBody('Hendrik Mans, CherryPy co-developer & crazy German ' + '(back)') + + def test07Sessions(self): + self.setup_tutorial("tut07_sessions", "HitCounter") + + self.getPage('/') + self.assertBody( + "\n During your current session, you've viewed this" + "\n page 1 times! Your life is a patio of fun!" + "\n ") + + self.getPage('/', self.cookies) + self.assertBody( + "\n During your current session, you've viewed this" + "\n page 2 times! Your life is a patio of fun!" + "\n ") + + def test08GeneratorsAndYield(self): + self.setup_tutorial("tut08_generators_and_yield", "GeneratorDemo") + self.getPage('/') + self.assertBody('

Generators rule!

' + '

List of users:

' + 'Remi
Carlos
Hendrik
Lorenzo Lamas
' + '') + + def test09Files(self): + self.setup_tutorial("tut09_files", "FileDemo") + + # Test upload + filesize = 5 + h = [("Content-type", "multipart/form-data; boundary=x"), + ("Content-Length", str(105 + filesize))] + b = ('--x\n' + 'Content-Disposition: form-data; name="myFile"; ' + 'filename="hello.txt"\r\n' + 'Content-Type: text/plain\r\n' + '\r\n') + b += 'a' * filesize + '\n' + '--x--\n' + self.getPage('/upload', h, "POST", b) + self.assertBody(''' + + myFile length: %d
+ myFile filename: hello.txt
+ myFile mime-type: text/plain + + ''' % filesize) + + # Test download + self.getPage('/download') + self.assertStatus("200 OK") + self.assertHeader("Content-Type", "application/x-download") + self.assertHeader("Content-Disposition", + # Make sure the filename is quoted. + 'attachment; filename="pdf_file.pdf"') + self.assertEqual(len(self.body), 85698) + + def test10HTTPErrors(self): + self.setup_tutorial("tut10_http_errors", "HTTPErrorDemo") + + @cherrypy.expose + def traceback_setting(): + return repr(cherrypy.request.show_tracebacks) + cherrypy.tree.mount(traceback_setting, '/traceback_setting') + + self.getPage("/") + self.assertInBody("""""") + self.assertInBody("""""") + self.assertInBody("""""") + self.assertInBody("""""") + self.assertInBody("""""") + + self.getPage("/traceback_setting") + setting = self.body + self.getPage("/toggleTracebacks") + self.assertStatus((302, 303)) + self.getPage("/traceback_setting") + self.assertBody(str(not eval(setting))) + + self.getPage("/error?code=500") + self.assertStatus(500) + self.assertInBody("The server encountered an unexpected condition " + "which prevented it from fulfilling the request.") + + self.getPage("/error?code=403") + self.assertStatus(403) + self.assertInBody("

You can't do that!

") + + self.getPage("/messageArg") + self.assertStatus(500) + self.assertInBody("If you construct an HTTPError with a 'message'") diff --git a/deps/cherrypy/test/test_virtualhost.py b/deps/cherrypy/test/test_virtualhost.py new file mode 100644 index 00000000..a495cbb3 --- /dev/null +++ b/deps/cherrypy/test/test_virtualhost.py @@ -0,0 +1,112 @@ +import os +curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) + +import cherrypy +from cherrypy.test import helper + + +class VirtualHostTest(helper.CPWebCase): + + @staticmethod + def setup_server(): + class Root: + + @cherrypy.expose + def index(self): + return "Hello, world" + + @cherrypy.expose + def dom4(self): + return "Under construction" + + @cherrypy.expose + def method(self, value): + return "You sent %s" % value + + class VHost: + + def __init__(self, sitename): + self.sitename = sitename + + @cherrypy.expose + def index(self): + return "Welcome to %s" % self.sitename + + @cherrypy.expose + def vmethod(self, value): + return "You sent %s" % value + + @cherrypy.expose + def url(self): + return cherrypy.url("nextpage") + + # Test static as a handler (section must NOT include vhost prefix) + static = cherrypy.tools.staticdir.handler( + section='/static', dir=curdir) + + root = Root() + root.mydom2 = VHost("Domain 2") + root.mydom3 = VHost("Domain 3") + hostmap = {'www.mydom2.com': '/mydom2', + 'www.mydom3.com': '/mydom3', + 'www.mydom4.com': '/dom4', + } + cherrypy.tree.mount(root, config={ + '/': { + 'request.dispatch': cherrypy.dispatch.VirtualHost(**hostmap) + }, + # Test static in config (section must include vhost prefix) + '/mydom2/static2': { + 'tools.staticdir.on': True, + 'tools.staticdir.root': curdir, + 'tools.staticdir.dir': 'static', + 'tools.staticdir.index': 'index.html', + }, + }) + + def testVirtualHost(self): + self.getPage("/", [('Host', 'www.mydom1.com')]) + self.assertBody('Hello, world') + self.getPage("/mydom2/", [('Host', 'www.mydom1.com')]) + self.assertBody('Welcome to Domain 2') 
+ + self.getPage("/", [('Host', 'www.mydom2.com')]) + self.assertBody('Welcome to Domain 2') + self.getPage("/", [('Host', 'www.mydom3.com')]) + self.assertBody('Welcome to Domain 3') + self.getPage("/", [('Host', 'www.mydom4.com')]) + self.assertBody('Under construction') + + # Test GET, POST, and positional params + self.getPage("/method?value=root") + self.assertBody("You sent root") + self.getPage("/vmethod?value=dom2+GET", [('Host', 'www.mydom2.com')]) + self.assertBody("You sent dom2 GET") + self.getPage("/vmethod", [('Host', 'www.mydom3.com')], method="POST", + body="value=dom3+POST") + self.assertBody("You sent dom3 POST") + self.getPage("/vmethod/pos", [('Host', 'www.mydom3.com')]) + self.assertBody("You sent pos") + + # Test that cherrypy.url uses the browser url, not the virtual url + self.getPage("/url", [('Host', 'www.mydom2.com')]) + self.assertBody("%s://www.mydom2.com/nextpage" % self.scheme) + + def test_VHost_plus_Static(self): + # Test static as a handler + self.getPage("/static/style.css", [('Host', 'www.mydom2.com')]) + self.assertStatus('200 OK') + self.assertHeader('Content-Type', 'text/css;charset=utf-8') + + # Test static in config + self.getPage("/static2/dirback.jpg", [('Host', 'www.mydom2.com')]) + self.assertStatus('200 OK') + self.assertHeaderIn('Content-Type', ['image/jpeg', 'image/pjpeg']) + + # Test static config with "index" arg + self.getPage("/static2/", [('Host', 'www.mydom2.com')]) + self.assertStatus('200 OK') + self.assertBody('Hello, world\r\n') + # Since tools.trailing_slash is on by default, this should redirect + self.getPage("/static2", [('Host', 'www.mydom2.com')]) + self.assertStatus(301) diff --git a/deps/cherrypy/test/test_wsgi_ns.py b/deps/cherrypy/test/test_wsgi_ns.py new file mode 100644 index 00000000..2d333e03 --- /dev/null +++ b/deps/cherrypy/test/test_wsgi_ns.py @@ -0,0 +1,94 @@ +import cherrypy +from cherrypy._cpcompat import ntob +from cherrypy.test import helper + + +class 
WSGI_Namespace_Test(helper.CPWebCase): + + @staticmethod + def setup_server(): + + class WSGIResponse(object): + + def __init__(self, appresults): + self.appresults = appresults + self.iter = iter(appresults) + + def __iter__(self): + return self + + def next(self): + return self.iter.next() + + def __next__(self): + return next(self.iter) + + def close(self): + if hasattr(self.appresults, "close"): + self.appresults.close() + + class ChangeCase(object): + + def __init__(self, app, to=None): + self.app = app + self.to = to + + def __call__(self, environ, start_response): + res = self.app(environ, start_response) + + class CaseResults(WSGIResponse): + + def next(this): + return getattr(this.iter.next(), self.to)() + + def __next__(this): + return getattr(next(this.iter), self.to)() + return CaseResults(res) + + class Replacer(object): + + def __init__(self, app, map={}): + self.app = app + self.map = map + + def __call__(self, environ, start_response): + res = self.app(environ, start_response) + + class ReplaceResults(WSGIResponse): + + def next(this): + line = this.iter.next() + for k, v in self.map.iteritems(): + line = line.replace(k, v) + return line + + def __next__(this): + line = next(this.iter) + for k, v in self.map.items(): + line = line.replace(k, v) + return line + return ReplaceResults(res) + + class Root(object): + + @cherrypy.expose + def index(self): + return "HellO WoRlD!" + + root_conf = {'wsgi.pipeline': [('replace', Replacer)], + 'wsgi.replace.map': {ntob('L'): ntob('X'), + ntob('l'): ntob('r')}, + } + + app = cherrypy.Application(Root()) + app.wsgiapp.pipeline.append(('changecase', ChangeCase)) + app.wsgiapp.config['changecase'] = {'to': 'upper'} + cherrypy.tree.mount(app, config={'/': root_conf}) + + def test_pipeline(self): + if not cherrypy.server.httpserver: + return self.skip() + + self.getPage("/") + # If body is "HEXXO WORXD!", the middleware was applied out of order. 
+ self.assertBody("HERRO WORRD!") diff --git a/deps/cherrypy/test/test_wsgi_unix_socket.py b/deps/cherrypy/test/test_wsgi_unix_socket.py new file mode 100644 index 00000000..7a829d51 --- /dev/null +++ b/deps/cherrypy/test/test_wsgi_unix_socket.py @@ -0,0 +1,106 @@ +import os +import sys +import socket +import atexit +import tempfile + +import cherrypy +from cherrypy.test import helper +from cherrypy._cpcompat import HTTPConnection + +def usocket_path(): + fd, path = tempfile.mkstemp('cp_test.sock') + os.close(fd) + os.remove(path) + return path + +USOCKET_PATH = usocket_path() + +class USocketHTTPConnection(HTTPConnection): + """ + HTTPConnection over a unix socket. + """ + + def __init__(self, path): + HTTPConnection.__init__(self, 'localhost') + self.path = path + + def __call__(self, *args, **kwargs): + """ + Catch-all method just to present itself as a constructor for the + HTTPConnection. + """ + return self + + def connect(self): + """ + Override the connect method and assign a unix socket as a transport. + """ + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.connect(self.path) + self.sock = sock + atexit.register(lambda: os.remove(self.path)) + + +def skip_on_windows(method): + """ + Decorator to skip the method call if the test is executing on Windows. + """ + def wrapper(self): + if sys.platform == "win32": + return self.skip("No UNIX Socket support in Windows.") + else: + return method(self) + wrapper.__doc__ = method.__doc__ + wrapper.__name__ = method.__name__ + return wrapper + + + +class WSGI_UnixSocket_Test(helper.CPWebCase): + """ + Test basic behavior on a cherrypy wsgi server listening + on a unix socket. + + It exercises the config option `server.socket_file`. 
+ """ + HTTP_CONN = USocketHTTPConnection(USOCKET_PATH) + + + @staticmethod + def setup_server(): + class Root(object): + + @cherrypy.expose + def index(self): + return "Test OK" + + @cherrypy.expose + def error(self): + raise Exception("Invalid page") + + config = { + 'server.socket_file': USOCKET_PATH + } + cherrypy.config.update(config) + cherrypy.tree.mount(Root()) + + def tearDown(self): + cherrypy.config.update({'server.socket_file': None}) + + @skip_on_windows + def test_simple_request(self): + self.getPage("/") + self.assertStatus("200 OK") + self.assertInBody("Test OK") + + @skip_on_windows + def test_not_found(self): + self.getPage("/invalid_path") + self.assertStatus("404 Not Found") + + @skip_on_windows + def test_internal_error(self): + self.getPage("/error") + self.assertStatus("500 Internal Server Error") + self.assertInBody("Invalid page") diff --git a/deps/cherrypy/test/test_wsgi_vhost.py b/deps/cherrypy/test/test_wsgi_vhost.py new file mode 100644 index 00000000..2c9ddfc2 --- /dev/null +++ b/deps/cherrypy/test/test_wsgi_vhost.py @@ -0,0 +1,35 @@ +import cherrypy +from cherrypy.test import helper + + +class WSGI_VirtualHost_Test(helper.CPWebCase): + + @staticmethod + def setup_server(): + + class ClassOfRoot(object): + + def __init__(self, name): + self.name = name + + @cherrypy.expose + def index(self): + return "Welcome to the %s website!" % self.name + + default = cherrypy.Application(None) + + domains = {} + for year in range(1997, 2008): + app = cherrypy.Application(ClassOfRoot('Class of %s' % year)) + domains['www.classof%s.example' % year] = app + + cherrypy.tree.graft(cherrypy._cpwsgi.VirtualHost(default, domains)) + + def test_welcome(self): + if not cherrypy.server.using_wsgi: + return self.skip("skipped (not using WSGI)... ") + + for year in range(1997, 2008): + self.getPage( + "/", headers=[('Host', 'www.classof%s.example' % year)]) + self.assertBody("Welcome to the Class of %s website!" 
% year) diff --git a/deps/cherrypy/test/test_wsgiapps.py b/deps/cherrypy/test/test_wsgiapps.py new file mode 100644 index 00000000..104d2b66 --- /dev/null +++ b/deps/cherrypy/test/test_wsgiapps.py @@ -0,0 +1,126 @@ +import sys + +from cherrypy._cpcompat import ntob +from cherrypy.test import helper + + +class WSGIGraftTests(helper.CPWebCase): + + @staticmethod + def setup_server(): + import os + curdir = os.path.join(os.getcwd(), os.path.dirname(__file__)) + + import cherrypy + + def test_app(environ, start_response): + status = '200 OK' + response_headers = [('Content-type', 'text/plain')] + start_response(status, response_headers) + output = ['Hello, world!\n', + 'This is a wsgi app running within CherryPy!\n\n'] + keys = list(environ.keys()) + keys.sort() + for k in keys: + output.append('%s: %s\n' % (k, environ[k])) + return [ntob(x, 'utf-8') for x in output] + + def test_empty_string_app(environ, start_response): + status = '200 OK' + response_headers = [('Content-type', 'text/plain')] + start_response(status, response_headers) + return [ + ntob('Hello'), ntob(''), ntob(' '), ntob(''), ntob('world') + ] + + class WSGIResponse(object): + + def __init__(self, appresults): + self.appresults = appresults + self.iter = iter(appresults) + + def __iter__(self): + return self + + if sys.version_info >= (3, 0): + def __next__(self): + return next(self.iter) + else: + def next(self): + return self.iter.next() + + def close(self): + if hasattr(self.appresults, "close"): + self.appresults.close() + + class ReversingMiddleware(object): + + def __init__(self, app): + self.app = app + + def __call__(self, environ, start_response): + results = app(environ, start_response) + + class Reverser(WSGIResponse): + + if sys.version_info >= (3, 0): + def __next__(this): + line = list(next(this.iter)) + line.reverse() + return bytes(line) + else: + def next(this): + line = list(this.iter.next()) + line.reverse() + return "".join(line) + + return Reverser(results) + + class Root: + + 
@cherrypy.expose + def index(self): + return ntob("I'm a regular CherryPy page handler!") + + cherrypy.tree.mount(Root()) + + cherrypy.tree.graft(test_app, '/hosted/app1') + cherrypy.tree.graft(test_empty_string_app, '/hosted/app3') + + # Set script_name explicitly to None to signal CP that it should + # be pulled from the WSGI environ each time. + app = cherrypy.Application(Root(), script_name=None) + cherrypy.tree.graft(ReversingMiddleware(app), '/hosted/app2') + + wsgi_output = '''Hello, world! +This is a wsgi app running within CherryPy!''' + + def test_01_standard_app(self): + self.getPage("/") + self.assertBody("I'm a regular CherryPy page handler!") + + def test_04_pure_wsgi(self): + import cherrypy + if not cherrypy.server.using_wsgi: + return self.skip("skipped (not using WSGI)... ") + self.getPage("/hosted/app1") + self.assertHeader("Content-Type", "text/plain") + self.assertInBody(self.wsgi_output) + + def test_05_wrapped_cp_app(self): + import cherrypy + if not cherrypy.server.using_wsgi: + return self.skip("skipped (not using WSGI)... ") + self.getPage("/hosted/app2/") + body = list("I'm a regular CherryPy page handler!") + body.reverse() + body = "".join(body) + self.assertInBody(body) + + def test_06_empty_string_app(self): + import cherrypy + if not cherrypy.server.using_wsgi: + return self.skip("skipped (not using WSGI)... 
") + self.getPage("/hosted/app3") + self.assertHeader("Content-Type", "text/plain") + self.assertInBody('Hello world') diff --git a/deps/cherrypy/test/test_xmlrpc.py b/deps/cherrypy/test/test_xmlrpc.py new file mode 100644 index 00000000..e95d5d79 --- /dev/null +++ b/deps/cherrypy/test/test_xmlrpc.py @@ -0,0 +1,187 @@ +import sys + +import six + +try: + from xmlrpclib import DateTime, Fault, ProtocolError, ServerProxy + from xmlrpclib import SafeTransport +except ImportError: + from xmlrpc.client import DateTime, Fault, ProtocolError, ServerProxy + from xmlrpc.client import SafeTransport + +if six.PY3: + HTTPSTransport = SafeTransport + + # Python 3.0's SafeTransport still mistakenly checks for socket.ssl + import socket + if not hasattr(socket, "ssl"): + socket.ssl = True +else: + class HTTPSTransport(SafeTransport): + + """Subclass of SafeTransport to fix sock.recv errors (by using file). + """ + + def request(self, host, handler, request_body, verbose=0): + # issue XML-RPC request + h = self.make_connection(host) + if verbose: + h.set_debuglevel(1) + + self.send_request(h, handler, request_body) + self.send_host(h, host) + self.send_user_agent(h) + self.send_content(h, request_body) + + errcode, errmsg, headers = h.getreply() + if errcode != 200: + raise ProtocolError(host + handler, errcode, errmsg, headers) + + self.verbose = verbose + + # Here's where we differ from the superclass. It says: + # try: + # sock = h._conn.sock + # except AttributeError: + # sock = None + # return self._parse_response(h.getfile(), sock) + + return self.parse_response(h.getfile()) + +import cherrypy + + +def setup_server(): + from cherrypy import _cptools + + class Root: + + @cherrypy.expose + def index(self): + return "I'm a standard index!" + + class XmlRpc(_cptools.XMLRPCController): + + @cherrypy.expose + def foo(self): + return "Hello world!" 
+ + @cherrypy.expose + def return_single_item_list(self): + return [42] + + @cherrypy.expose + def return_string(self): + return "here is a string" + + @cherrypy.expose + def return_tuple(self): + return ('here', 'is', 1, 'tuple') + + @cherrypy.expose + def return_dict(self): + return dict(a=1, b=2, c=3) + + @cherrypy.expose + def return_composite(self): + return dict(a=1, z=26), 'hi', ['welcome', 'friend'] + + @cherrypy.expose + def return_int(self): + return 42 + + @cherrypy.expose + def return_float(self): + return 3.14 + + @cherrypy.expose + def return_datetime(self): + return DateTime((2003, 10, 7, 8, 1, 0, 1, 280, -1)) + + @cherrypy.expose + def return_boolean(self): + return True + + @cherrypy.expose + def test_argument_passing(self, num): + return num * 2 + + @cherrypy.expose + def test_returning_Fault(self): + return Fault(1, "custom Fault response") + + root = Root() + root.xmlrpc = XmlRpc() + cherrypy.tree.mount(root, config={'/': { + 'request.dispatch': cherrypy.dispatch.XMLRPCDispatcher(), + 'tools.xmlrpc.allow_none': 0, + }}) + + +from cherrypy.test import helper + + +class XmlRpcTest(helper.CPWebCase): + setup_server = staticmethod(setup_server) + + def testXmlRpc(self): + + scheme = self.scheme + if scheme == "https": + url = 'https://%s:%s/xmlrpc/' % (self.interface(), self.PORT) + proxy = ServerProxy(url, transport=HTTPSTransport()) + else: + url = 'http://%s:%s/xmlrpc/' % (self.interface(), self.PORT) + proxy = ServerProxy(url) + + # begin the tests ... 
+ self.getPage("/xmlrpc/foo") + self.assertBody("Hello world!") + + self.assertEqual(proxy.return_single_item_list(), [42]) + self.assertNotEqual(proxy.return_single_item_list(), 'one bazillion') + self.assertEqual(proxy.return_string(), "here is a string") + self.assertEqual(proxy.return_tuple(), + list(('here', 'is', 1, 'tuple'))) + self.assertEqual(proxy.return_dict(), {'a': 1, 'c': 3, 'b': 2}) + self.assertEqual(proxy.return_composite(), + [{'a': 1, 'z': 26}, 'hi', ['welcome', 'friend']]) + self.assertEqual(proxy.return_int(), 42) + self.assertEqual(proxy.return_float(), 3.14) + self.assertEqual(proxy.return_datetime(), + DateTime((2003, 10, 7, 8, 1, 0, 1, 280, -1))) + self.assertEqual(proxy.return_boolean(), True) + self.assertEqual(proxy.test_argument_passing(22), 22 * 2) + + # Test an error in the page handler (should raise an xmlrpclib.Fault) + try: + proxy.test_argument_passing({}) + except Exception: + x = sys.exc_info()[1] + self.assertEqual(x.__class__, Fault) + self.assertEqual(x.faultString, ("unsupported operand type(s) " + "for *: 'dict' and 'int'")) + else: + self.fail("Expected xmlrpclib.Fault") + + # https://github.com/cherrypy/cherrypy/issues/533 + # if a method is not found, an xmlrpclib.Fault should be raised + try: + proxy.non_method() + except Exception: + x = sys.exc_info()[1] + self.assertEqual(x.__class__, Fault) + self.assertEqual(x.faultString, + 'method "non_method" is not supported') + else: + self.fail("Expected xmlrpclib.Fault") + + # Test returning a Fault from the page handler. 
+ try: + proxy.test_returning_Fault() + except Exception: + x = sys.exc_info()[1] + self.assertEqual(x.__class__, Fault) + self.assertEqual(x.faultString, ("custom Fault response")) + else: + self.fail("Expected xmlrpclib.Fault") diff --git a/deps/cherrypy/test/webtest.py b/deps/cherrypy/test/webtest.py new file mode 100644 index 00000000..8e8352be --- /dev/null +++ b/deps/cherrypy/test/webtest.py @@ -0,0 +1,612 @@ +"""Extensions to unittest for web frameworks. + +Use the WebCase.getPage method to request a page from your HTTP server. + +Framework Integration +===================== + +If you have control over your server process, you can handle errors +in the server-side of the HTTP conversation a bit better. You must run +both the client (your WebCase tests) and the server in the same process +(but in separate threads, obviously). + +When an error occurs in the framework, call server_error. It will print +the traceback to stdout, and keep any assertions you have from running +(the assumption is that, if the server errors, the page output will not +be of further significance to your tests). +""" + +import pprint +import re +import socket +import sys +import time +import traceback +import types +import os +import json + +import unittest + +import six + +from cherrypy._cpcompat import text_or_bytes, HTTPConnection +from cherrypy._cpcompat import HTTPSConnection + + +def interface(host): + """Return an IP address for a client connection given the server host. + + If the server is listening on '0.0.0.0' (INADDR_ANY) + or '::' (IN6ADDR_ANY), this will return the proper localhost.""" + if host == '0.0.0.0': + # INADDR_ANY, which should respond on localhost. + return "127.0.0.1" + if host == '::': + # IN6ADDR_ANY, which should respond on localhost. 
+ return "::1" + return host + + +class TerseTestResult(unittest._TextTestResult): + + def printErrors(self): + # Overridden to avoid unnecessary empty line + if self.errors or self.failures: + if self.dots or self.showAll: + self.stream.writeln() + self.printErrorList('ERROR', self.errors) + self.printErrorList('FAIL', self.failures) + + +class TerseTestRunner(unittest.TextTestRunner): + + """A test runner class that displays results in textual form.""" + + def _makeResult(self): + return TerseTestResult(self.stream, self.descriptions, self.verbosity) + + def run(self, test): + "Run the given test case or test suite." + # Overridden to remove unnecessary empty lines and separators + result = self._makeResult() + test(result) + result.printErrors() + if not result.wasSuccessful(): + self.stream.write("FAILED (") + failed, errored = list(map(len, (result.failures, result.errors))) + if failed: + self.stream.write("failures=%d" % failed) + if errored: + if failed: + self.stream.write(", ") + self.stream.write("errors=%d" % errored) + self.stream.writeln(")") + return result + + +class ReloadingTestLoader(unittest.TestLoader): + + def loadTestsFromName(self, name, module=None): + """Return a suite of all tests cases given a string specifier. + + The name may resolve either to a module, a test case class, a + test method within a test case class, or a callable object which + returns a TestCase or TestSuite instance. + + The method optionally resolves the names relative to a given module. 
+ """ + parts = name.split('.') + unused_parts = [] + if module is None: + if not parts: + raise ValueError("incomplete test name: %s" % name) + else: + parts_copy = parts[:] + while parts_copy: + target = ".".join(parts_copy) + if target in sys.modules: + module = reload(sys.modules[target]) + parts = unused_parts + break + else: + try: + module = __import__(target) + parts = unused_parts + break + except ImportError: + unused_parts.insert(0, parts_copy[-1]) + del parts_copy[-1] + if not parts_copy: + raise + parts = parts[1:] + obj = module + for part in parts: + obj = getattr(obj, part) + + if isinstance(obj, types.ModuleType): + return self.loadTestsFromModule(obj) + elif (((six.PY3 and isinstance(obj, type)) + or isinstance(obj, (type, types.ClassType))) + and issubclass(obj, unittest.TestCase)): + return self.loadTestsFromTestCase(obj) + elif isinstance(obj, types.UnboundMethodType): + if six.PY3: + return obj.__self__.__class__(obj.__name__) + else: + return obj.im_class(obj.__name__) + elif hasattr(obj, '__call__'): + test = obj() + if not isinstance(test, unittest.TestCase) and \ + not isinstance(test, unittest.TestSuite): + raise ValueError("calling %s returned %s, " + "not a test" % (obj, test)) + return test + else: + raise ValueError("do not know how to make test from: %s" % obj) + + +try: + # Jython support + if sys.platform[:4] == 'java': + def getchar(): + # Hopefully this is enough + return sys.stdin.read(1) + else: + # On Windows, msvcrt.getch reads a single char without output. 
+ import msvcrt + + def getchar(): + return msvcrt.getch() +except ImportError: + # Unix getchr + import tty + import termios + + def getchar(): + fd = sys.stdin.fileno() + old_settings = termios.tcgetattr(fd) + try: + tty.setraw(sys.stdin.fileno()) + ch = sys.stdin.read(1) + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + return ch + + +# from jaraco.properties +class NonDataProperty(object): + def __init__(self, fget): + assert fget is not None, "fget cannot be none" + assert callable(fget), "fget must be callable" + self.fget = fget + + def __get__(self, obj, objtype=None): + if obj is None: + return self + return self.fget(obj) + + +class WebCase(unittest.TestCase): + HOST = "127.0.0.1" + PORT = 8000 + HTTP_CONN = HTTPConnection + PROTOCOL = "HTTP/1.1" + + scheme = "http" + url = None + + status = None + headers = None + body = None + + encoding = 'utf-8' + + time = None + + def get_conn(self, auto_open=False): + """Return a connection to our HTTP server.""" + if self.scheme == "https": + cls = HTTPSConnection + else: + cls = HTTPConnection + conn = cls(self.interface(), self.PORT) + # Automatically re-connect? + conn.auto_open = auto_open + conn.connect() + return conn + + def set_persistent(self, on=True, auto_open=False): + """Make our HTTP_CONN persistent (or not). + + If the 'on' argument is True (the default), then self.HTTP_CONN + will be set to an instance of HTTPConnection (or HTTPS + if self.scheme is "https"). This will then persist across requests. + + We only allow for a single open connection, so if you call this + and we currently have an open connection, it will be closed. 
+ """ + try: + self.HTTP_CONN.close() + except (TypeError, AttributeError): + pass + + if on: + self.HTTP_CONN = self.get_conn(auto_open=auto_open) + else: + if self.scheme == "https": + self.HTTP_CONN = HTTPSConnection + else: + self.HTTP_CONN = HTTPConnection + + def _get_persistent(self): + return hasattr(self.HTTP_CONN, "__class__") + + def _set_persistent(self, on): + self.set_persistent(on) + persistent = property(_get_persistent, _set_persistent) + + def interface(self): + """Return an IP address for a client connection. + + If the server is listening on '0.0.0.0' (INADDR_ANY) + or '::' (IN6ADDR_ANY), this will return the proper localhost.""" + return interface(self.HOST) + + def getPage(self, url, headers=None, method="GET", body=None, + protocol=None, raise_subcls=None): + """Open the url with debugging support. Return status, headers, body. + + `raise_subcls` must be a tuple with the exceptions classes + or a single exception class that are not going to be considered + a socket.error regardless that they were are subclass of a + socket.error and therefore not considered for a connection retry. + """ + ServerError.on = False + + if isinstance(url, six.text_type): + url = url.encode('utf-8') + if isinstance(body, six.text_type): + body = body.encode('utf-8') + + self.url = url + self.time = None + start = time.time() + result = openURL(url, headers, method, body, self.HOST, self.PORT, + self.HTTP_CONN, protocol or self.PROTOCOL, + raise_subcls) + self.time = time.time() - start + self.status, self.headers, self.body = result + + # Build a list of request cookies from the previous response cookies. + self.cookies = [('Cookie', v) for k, v in self.headers + if k.lower() == 'set-cookie'] + + if ServerError.on: + raise ServerError() + return result + + @NonDataProperty + def interactive(self): + """ + Load interactivity setting from environment, where + the value can be numeric or a string like true or + False or 1 or 0. 
+ """ + env_str = os.environ.get('WEBTEST_INTERACTIVE', 'True') + return bool(json.loads(env_str.lower())) + + console_height = 30 + + def _handlewebError(self, msg): + print("") + print(" ERROR: %s" % msg) + + if not self.interactive: + raise self.failureException(msg) + + p = (" Show: " + "[B]ody [H]eaders [S]tatus [U]RL; " + "[I]gnore, [R]aise, or sys.e[X]it >> ") + sys.stdout.write(p) + sys.stdout.flush() + while True: + i = getchar().upper() + if not isinstance(i, type("")): + i = i.decode('ascii') + if i not in "BHSUIRX": + continue + print(i.upper()) # Also prints new line + if i == "B": + for x, line in enumerate(self.body.splitlines()): + if (x + 1) % self.console_height == 0: + # The \r and comma should make the next line overwrite + sys.stdout.write("<-- More -->\r") + m = getchar().lower() + # Erase our "More" prompt + sys.stdout.write(" \r") + if m == "q": + break + print(line) + elif i == "H": + pprint.pprint(self.headers) + elif i == "S": + print(self.status) + elif i == "U": + print(self.url) + elif i == "I": + # return without raising the normal exception + return + elif i == "R": + raise self.failureException(msg) + elif i == "X": + self.exit() + sys.stdout.write(p) + sys.stdout.flush() + + def exit(self): + sys.exit() + + def assertStatus(self, status, msg=None): + """Fail if self.status != status.""" + if isinstance(status, text_or_bytes): + if not self.status == status: + if msg is None: + msg = 'Status (%r) != %r' % (self.status, status) + self._handlewebError(msg) + elif isinstance(status, int): + code = int(self.status[:3]) + if code != status: + if msg is None: + msg = 'Status (%r) != %r' % (self.status, status) + self._handlewebError(msg) + else: + # status is a tuple or list. 
+ match = False + for s in status: + if isinstance(s, text_or_bytes): + if self.status == s: + match = True + break + elif int(self.status[:3]) == s: + match = True + break + if not match: + if msg is None: + msg = 'Status (%r) not in %r' % (self.status, status) + self._handlewebError(msg) + + def assertHeader(self, key, value=None, msg=None): + """Fail if (key, [value]) not in self.headers.""" + lowkey = key.lower() + for k, v in self.headers: + if k.lower() == lowkey: + if value is None or str(value) == v: + return v + + if msg is None: + if value is None: + msg = '%r not in headers' % key + else: + msg = '%r:%r not in headers' % (key, value) + self._handlewebError(msg) + + def assertHeaderIn(self, key, values, msg=None): + """Fail if header indicated by key doesn't have one of the values.""" + lowkey = key.lower() + for k, v in self.headers: + if k.lower() == lowkey: + matches = [value for value in values if str(value) == v] + if matches: + return matches + + if msg is None: + msg = '%(key)r not in %(values)r' % vars() + self._handlewebError(msg) + + def assertHeaderItemValue(self, key, value, msg=None): + """Fail if the header does not contain the specified value""" + actual_value = self.assertHeader(key, msg=msg) + header_values = map(str.strip, actual_value.split(',')) + if value in header_values: + return value + + if msg is None: + msg = "%r not in %r" % (value, header_values) + self._handlewebError(msg) + + def assertNoHeader(self, key, msg=None): + """Fail if key in self.headers.""" + lowkey = key.lower() + matches = [k for k, v in self.headers if k.lower() == lowkey] + if matches: + if msg is None: + msg = '%r in headers' % key + self._handlewebError(msg) + + def assertBody(self, value, msg=None): + """Fail if value != self.body.""" + if isinstance(value, six.text_type): + value = value.encode(self.encoding) + if value != self.body: + if msg is None: + msg = 'expected body:\n%r\n\nactual body:\n%r' % ( + value, self.body) + self._handlewebError(msg) + + 
def assertInBody(self, value, msg=None): + """Fail if value not in self.body.""" + if isinstance(value, six.text_type): + value = value.encode(self.encoding) + if value not in self.body: + if msg is None: + msg = '%r not in body: %s' % (value, self.body) + self._handlewebError(msg) + + def assertNotInBody(self, value, msg=None): + """Fail if value in self.body.""" + if isinstance(value, six.text_type): + value = value.encode(self.encoding) + if value in self.body: + if msg is None: + msg = '%r found in body' % value + self._handlewebError(msg) + + def assertMatchesBody(self, pattern, msg=None, flags=0): + """Fail if value (a regex pattern) is not in self.body.""" + if isinstance(pattern, six.text_type): + pattern = pattern.encode(self.encoding) + if re.search(pattern, self.body, flags) is None: + if msg is None: + msg = 'No match for %r in body' % pattern + self._handlewebError(msg) + + +methods_with_bodies = ("POST", "PUT") + + +def cleanHeaders(headers, method, body, host, port): + """Return request headers, with required headers added (if missing).""" + if headers is None: + headers = [] + + # Add the required Host request header if not present. + # [This specifies the host:port of the server, not the client.] 
+ found = False + for k, v in headers: + if k.lower() == 'host': + found = True + break + if not found: + if port == 80: + headers.append(("Host", host)) + else: + headers.append(("Host", "%s:%s" % (host, port))) + + if method in methods_with_bodies: + # Stick in default type and length headers if not present + found = False + for k, v in headers: + if k.lower() == 'content-type': + found = True + break + if not found: + headers.append( + ("Content-Type", "application/x-www-form-urlencoded")) + headers.append(("Content-Length", str(len(body or "")))) + + return headers + + +def shb(response): + """Return status, headers, body the way we like from a response.""" + if six.PY3: + h = response.getheaders() + else: + h = [] + key, value = None, None + for line in response.msg.headers: + if line: + if line[0] in " \t": + value += line.strip() + else: + if key and value: + h.append((key, value)) + key, value = line.split(":", 1) + key = key.strip() + value = value.strip() + if key and value: + h.append((key, value)) + + return "%s %s" % (response.status, response.reason), h, response.read() + + +def openURL(url, headers=None, method="GET", body=None, + host="127.0.0.1", port=8000, http_conn=HTTPConnection, + protocol="HTTP/1.1", raise_subcls=None): + """ + Open the given HTTP resource and return status, headers, and body. + + `raise_subcls` must be a tuple with the exceptions classes + or a single exception class that are not going to be considered + a socket.error regardless that they were are subclass of a + socket.error and therefore not considered for a connection retry. + """ + headers = cleanHeaders(headers, method, body, host, port) + + # Trying 10 times is simply in case of socket errors. + # Normal case--it should run once. 
+ for trial in range(10): + try: + # Allow http_conn to be a class or an instance + if hasattr(http_conn, "host"): + conn = http_conn + else: + conn = http_conn(interface(host), port) + + conn._http_vsn_str = protocol + conn._http_vsn = int("".join([x for x in protocol if x.isdigit()])) + + if six.PY3 and isinstance(url, bytes): + url = url.decode() + conn.putrequest(method.upper(), url, skip_host=True, + skip_accept_encoding=True) + + for key, value in headers: + conn.putheader(key, value.encode("Latin-1")) + conn.endheaders() + + if body is not None: + conn.send(body) + + # Handle response + response = conn.getresponse() + + s, h, b = shb(response) + + if not hasattr(http_conn, "host"): + # We made our own conn instance. Close it. + conn.close() + + return s, h, b + except socket.error as e: + if raise_subcls is not None and isinstance(e, raise_subcls): + raise + else: + time.sleep(0.5) + if trial == 9: + raise + + + + +# Add any exceptions which your web framework handles +# normally (that you don't want server_error to trap). +ignored_exceptions = [] + +# You'll want set this to True when you can't guarantee +# that each response will immediately follow each request; +# for example, when handling requests via multiple threads. +ignore_all = False + + +class ServerError(Exception): + on = False + + +def server_error(exc=None): + """Server debug hook. Return True if exception handled, False if ignored. + + You probably want to wrap this, so you can still handle an error using + your framework when it's ignored. 
+ """ + if exc is None: + exc = sys.exc_info() + + if ignore_all or exc[0] in ignored_exceptions: + return False + else: + ServerError.on = True + print("") + print("".join(traceback.format_exception(*exc))) + return True diff --git a/deps/cherrypy/tutorial/__init__.py b/deps/cherrypy/tutorial/__init__.py new file mode 100644 index 00000000..08c142c5 --- /dev/null +++ b/deps/cherrypy/tutorial/__init__.py @@ -0,0 +1,3 @@ + +# This is used in test_config to test unrepr of "from A import B" +thing2 = object() diff --git a/deps/cherrypy/tutorial/__pycache__/__init__.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..b5959f38 Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut01_helloworld.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut01_helloworld.cpython-34.pyc new file mode 100644 index 00000000..86e04446 Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut01_helloworld.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut02_expose_methods.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut02_expose_methods.cpython-34.pyc new file mode 100644 index 00000000..2aac0d83 Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut02_expose_methods.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut03_get_and_post.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut03_get_and_post.cpython-34.pyc new file mode 100644 index 00000000..c15ed569 Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut03_get_and_post.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut04_complex_site.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut04_complex_site.cpython-34.pyc new file mode 100644 index 00000000..914a2fcf Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut04_complex_site.cpython-34.pyc 
differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut05_derived_objects.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut05_derived_objects.cpython-34.pyc new file mode 100644 index 00000000..5daff958 Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut05_derived_objects.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut06_default_method.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut06_default_method.cpython-34.pyc new file mode 100644 index 00000000..bff4c6b4 Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut06_default_method.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut07_sessions.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut07_sessions.cpython-34.pyc new file mode 100644 index 00000000..b51b066e Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut07_sessions.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut08_generators_and_yield.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut08_generators_and_yield.cpython-34.pyc new file mode 100644 index 00000000..cf72d43d Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut08_generators_and_yield.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut09_files.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut09_files.cpython-34.pyc new file mode 100644 index 00000000..e2231148 Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut09_files.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/__pycache__/tut10_http_errors.cpython-34.pyc b/deps/cherrypy/tutorial/__pycache__/tut10_http_errors.cpython-34.pyc new file mode 100644 index 00000000..b9495b1a Binary files /dev/null and b/deps/cherrypy/tutorial/__pycache__/tut10_http_errors.cpython-34.pyc differ diff --git a/deps/cherrypy/tutorial/tut01_helloworld.py b/deps/cherrypy/tutorial/tut01_helloworld.py new file mode 100644 index 00000000..aaf89fd9 --- /dev/null +++ 
b/deps/cherrypy/tutorial/tut01_helloworld.py @@ -0,0 +1,33 @@ +""" +Tutorial - Hello World + +The most basic (working) CherryPy application possible. +""" + +# Import CherryPy global namespace +import cherrypy + + +class HelloWorld: + + """ Sample request handler class. """ + + # Expose the index method through the web. CherryPy will never + # publish methods that don't have the exposed attribute set to True. + @cherrypy.expose + def index(self): + # CherryPy will call this method for the root URI ("/") and send + # its return value to the client. Because this is tutorial + # lesson number 01, we'll just send something really simple. + # How about... + return "Hello world!" + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(HelloWorld(), config=tutconf) diff --git a/deps/cherrypy/tutorial/tut02_expose_methods.py b/deps/cherrypy/tutorial/tut02_expose_methods.py new file mode 100644 index 00000000..d9e07cbf --- /dev/null +++ b/deps/cherrypy/tutorial/tut02_expose_methods.py @@ -0,0 +1,30 @@ +""" +Tutorial - Multiple methods + +This tutorial shows you how to link to other methods of your request +handler. +""" + +import cherrypy + + +class HelloWorld: + + @cherrypy.expose + def index(self): + # Let's link to another method here. + return 'We have an
important message for you!' + + @cherrypy.expose + def show_msg(self): + # Here's the important message! + return "Hello world!" + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(HelloWorld(), config=tutconf) diff --git a/deps/cherrypy/tutorial/tut03_get_and_post.py b/deps/cherrypy/tutorial/tut03_get_and_post.py new file mode 100644 index 00000000..38b42e6b --- /dev/null +++ b/deps/cherrypy/tutorial/tut03_get_and_post.py @@ -0,0 +1,50 @@ +""" +Tutorial - Passing variables + +This tutorial shows you how to pass GET/POST variables to methods. +""" + +import cherrypy + + +class WelcomePage: + + @cherrypy.expose + def index(self): + # Ask for the user's name. + return ''' + + What is your name? + + +
''' + + @cherrypy.expose + def greetUser(self, name=None): + # CherryPy passes all GET and POST variables as method parameters. + # It doesn't make a difference where the variables come from, how + # large their contents are, and so on. + # + # You can define default parameter values as usual. In this + # example, the "name" parameter defaults to None so we can check + # if a name was actually specified. + + if name: + # Greet the user! + return "Hey %s, what's up?" % name + else: + if name is None: + # No name was specified + return 'Please enter your name here.' + else: + return 'No, really, enter your name here.' + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(WelcomePage(), config=tutconf) diff --git a/deps/cherrypy/tutorial/tut04_complex_site.py b/deps/cherrypy/tutorial/tut04_complex_site.py new file mode 100644 index 00000000..6f8654af --- /dev/null +++ b/deps/cherrypy/tutorial/tut04_complex_site.py @@ -0,0 +1,102 @@ +""" +Tutorial - Multiple objects + +This tutorial shows you how to create a site structure through multiple +possibly nested request handler objects. +""" + +import cherrypy + + +class HomePage: + + @cherrypy.expose + def index(self): + return ''' +

Hi, this is the home page! Check out the other + fun stuff on this site:

+ + ''' + + +class JokePage: + + @cherrypy.expose + def index(self): + return ''' +

"In Python, how do you create a string of random + characters?" -- "Read a Perl file!"

+

[Return]

''' + + +class LinksPage: + + def __init__(self): + # Request handler objects can create their own nested request + # handler objects. Simply create them inside their __init__ + # methods! + self.extra = ExtraLinksPage() + + @cherrypy.expose + def index(self): + # Note the way we link to the extra links page (and back). + # As you can see, this object doesn't really care about its + # absolute position in the site tree, since we use relative + # links exclusively. + return ''' +

Here are some useful links:

+ + + +

You can check out some extra useful + links here.

+ +

[Return]

+ ''' + + +class ExtraLinksPage: + + @cherrypy.expose + def index(self): + # Note the relative link back to the Links page! + return ''' +

Here are some extra useful links:

+ + + +

[Return to links page]

''' + + +# Of course we can also mount request handler objects right here! +root = HomePage() +root.joke = JokePage() +root.links = LinksPage() + +# Remember, we don't need to mount ExtraLinksPage here, because +# LinksPage does that itself on initialization. In fact, there is +# no reason why you shouldn't let your root object take care of +# creating all contained request handler objects. + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(root, config=tutconf) diff --git a/deps/cherrypy/tutorial/tut05_derived_objects.py b/deps/cherrypy/tutorial/tut05_derived_objects.py new file mode 100644 index 00000000..647fac56 --- /dev/null +++ b/deps/cherrypy/tutorial/tut05_derived_objects.py @@ -0,0 +1,79 @@ +""" +Tutorial - Object inheritance + +You are free to derive your request handler classes from any base +class you wish. In most real-world applications, you will probably +want to create a central base class used for all your pages, which takes +care of things like printing a common page header and footer. +""" + +import cherrypy + + +class Page: + # Store the page title in a class attribute + title = 'Untitled Page' + + def header(self): + return ''' + + + %s + + +

%s

+ ''' % (self.title, self.title) + + def footer(self): + return ''' + + + ''' + + # Note that header and footer don't get their exposed attributes + # set to True. This isn't necessary since the user isn't supposed + # to call header or footer directly; instead, we'll call them from + # within the actually exposed handler methods defined in this + # class' subclasses. + + +class HomePage(Page): + # Different title for this page + title = 'Tutorial 5' + + def __init__(self): + # create a subpage + self.another = AnotherPage() + + @cherrypy.expose + def index(self): + # Note that we call the header and footer methods inherited + # from the Page class! + return self.header() + ''' +

+ Isn't this exciting? There's + another page, too! +

+ ''' + self.footer() + + +class AnotherPage(Page): + title = 'Another Page' + + @cherrypy.expose + def index(self): + return self.header() + ''' +

+ And this is the amazing second page! +

+ ''' + self.footer() + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(HomePage(), config=tutconf) diff --git a/deps/cherrypy/tutorial/tut06_default_method.py b/deps/cherrypy/tutorial/tut06_default_method.py new file mode 100644 index 00000000..24f839d7 --- /dev/null +++ b/deps/cherrypy/tutorial/tut06_default_method.py @@ -0,0 +1,60 @@ +""" +Tutorial - The default method + +Request handler objects can implement a method called "default" that +is called when no other suitable method/object could be found. +Essentially, if CherryPy2 can't find a matching request handler object +for the given request URI, it will use the default method of the object +located deepest on the URI path. + +Using this mechanism you can easily simulate virtual URI structures +by parsing the extra URI string, which you can access through +cherrypy.request.virtualPath. + +The application in this tutorial simulates an URI structure looking +like /users/. Since the bit will not be found (as +there are no matching methods), it is handled by the default method. +""" + +import cherrypy + + +class UsersPage: + + @cherrypy.expose + def index(self): + # Since this is just a stupid little example, we'll simply + # display a list of links to random, made-up users. In a real + # application, this could be generated from a database result set. + return ''' + Remi Delon
+ Hendrik Mans
+ Lorenzo Lamas
+ ''' + + @cherrypy.expose + def default(self, user): + # Here we react depending on the virtualPath -- the part of the + # path that could not be mapped to an object method. In a real + # application, we would probably do some database lookups here + # instead of the silly if/elif/else construct. + if user == 'remi': + out = "Remi Delon, CherryPy lead developer" + elif user == 'hendrik': + out = "Hendrik Mans, CherryPy co-developer & crazy German" + elif user == 'lorenzo': + out = "Lorenzo Lamas, famous actor and singer!" + else: + out = "Unknown user. :-(" + + return '%s (back)' % out + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(UsersPage(), config=tutconf) diff --git a/deps/cherrypy/tutorial/tut07_sessions.py b/deps/cherrypy/tutorial/tut07_sessions.py new file mode 100644 index 00000000..e6d31540 --- /dev/null +++ b/deps/cherrypy/tutorial/tut07_sessions.py @@ -0,0 +1,40 @@ +""" +Tutorial - Sessions + +Storing session data in CherryPy applications is very easy: cherrypy +provides a dictionary called "session" that represents the session +data for the current user. If you use RAM based sessions, you can store +any kind of object into that dictionary; otherwise, you are limited to +objects that can be pickled. +""" + +import cherrypy + + +class HitCounter: + + _cp_config = {'tools.sessions.on': True} + + @cherrypy.expose + def index(self): + # Increase the silly hit counter + count = cherrypy.session.get('count', 0) + 1 + + # Store the new value in the session dictionary + cherrypy.session['count'] = count + + # And display a silly hit count message! + return ''' + During your current session, you've viewed this + page %s times! Your life is a patio of fun! 
+ ''' % count + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(HitCounter(), config=tutconf) diff --git a/deps/cherrypy/tutorial/tut08_generators_and_yield.py b/deps/cherrypy/tutorial/tut08_generators_and_yield.py new file mode 100644 index 00000000..8c4fe839 --- /dev/null +++ b/deps/cherrypy/tutorial/tut08_generators_and_yield.py @@ -0,0 +1,43 @@ +""" +Bonus Tutorial: Using generators to return result bodies + +Instead of returning a complete result string, you can use the yield +statement to return one result part after another. This may be convenient +in situations where using a template package like CherryPy or Cheetah +would be overkill, and messy string concatenation too uncool. ;-) +""" + +import cherrypy + + +class GeneratorDemo: + + def header(self): + return "

Generators rule!

" + + def footer(self): + return "" + + @cherrypy.expose + def index(self): + # Let's make up a list of users for presentation purposes + users = ['Remi', 'Carlos', 'Hendrik', 'Lorenzo Lamas'] + + # Every yield line adds one part to the total result body. + yield self.header() + yield "

List of users:

" + + for user in users: + yield "%s
" % user + + yield self.footer() + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(GeneratorDemo(), config=tutconf) diff --git a/deps/cherrypy/tutorial/tut09_files.py b/deps/cherrypy/tutorial/tut09_files.py new file mode 100644 index 00000000..f76c1c65 --- /dev/null +++ b/deps/cherrypy/tutorial/tut09_files.py @@ -0,0 +1,104 @@ +""" + +Tutorial: File upload and download + +Uploads +------- + +When a client uploads a file to a CherryPy application, it's placed +on disk immediately. CherryPy will pass it to your exposed method +as an argument (see "myFile" below); that arg will have a "file" +attribute, which is a handle to the temporary uploaded file. +If you wish to permanently save the file, you need to read() +from myFile.file and write() somewhere else. + +Note the use of 'enctype="multipart/form-data"' and 'input type="file"' +in the HTML which the client uses to upload the file. + + +Downloads +--------- + +If you wish to send a file to the client, you have two options: +First, you can simply return a file-like object from your page handler. +CherryPy will read the file and serve it as the content (HTTP body) +of the response. However, that doesn't tell the client that +the response is a file to be saved, rather than displayed. +Use cherrypy.lib.static.serve_file for that; it takes four +arguments: + +serve_file(path, content_type=None, disposition=None, name=None) + +Set "name" to the filename that you expect clients to use when they save +your file. Note that the "name" argument is ignored if you don't also +provide a "disposition" (usually "attachement"). 
You can manually set +"content_type", but be aware that if you also use the encoding tool, it +may choke if the file extension is not recognized as belonging to a known +Content-Type. Setting the content_type to "application/x-download" works +in most cases, and should prompt the user with an Open/Save dialog in +popular browsers. + +""" + +import os +localDir = os.path.dirname(__file__) +absDir = os.path.join(os.getcwd(), localDir) + +import cherrypy +from cherrypy.lib import static + + +class FileDemo(object): + + @cherrypy.expose + def index(self): + return """ + +

Upload a file

+
+ filename:
+ +
+

Download a file

+ This one + + """ + + @cherrypy.expose + def upload(self, myFile): + out = """ + + myFile length: %s
+ myFile filename: %s
+ myFile mime-type: %s + + """ + + # Although this just counts the file length, it demonstrates + # how to read large files in chunks instead of all at once. + # CherryPy reads the uploaded file into a temporary file; + # myFile.file.read reads from that. + size = 0 + while True: + data = myFile.file.read(8192) + if not data: + break + size += len(data) + + return out % (size, myFile.filename, myFile.content_type) + + @cherrypy.expose + def download(self): + path = os.path.join(absDir, "pdf_file.pdf") + return static.serve_file(path, "application/x-download", + "attachment", os.path.basename(path)) + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(FileDemo(), config=tutconf) diff --git a/deps/cherrypy/tutorial/tut10_http_errors.py b/deps/cherrypy/tutorial/tut10_http_errors.py new file mode 100644 index 00000000..c1158ce6 --- /dev/null +++ b/deps/cherrypy/tutorial/tut10_http_errors.py @@ -0,0 +1,83 @@ +""" + +Tutorial: HTTP errors + +HTTPError is used to return an error response to the client. +CherryPy has lots of options regarding how such errors are +logged, displayed, and formatted. + +""" + +import os +localDir = os.path.dirname(__file__) +curpath = os.path.normpath(os.path.join(os.getcwd(), localDir)) + +import cherrypy + + +class HTTPErrorDemo(object): + + # Set a custom response for 403 errors. + _cp_config = {'error_page.403': + os.path.join(curpath, "custom_error.html")} + + @cherrypy.expose + def index(self): + # display some links that will result in errors + tracebacks = cherrypy.request.show_tracebacks + if tracebacks: + trace = 'off' + else: + trace = 'on' + + return """ + +

Toggle tracebacks %s

+

Click me; I'm a broken link!

+

+ + Use a custom error page from a file. + +

+

These errors are explicitly raised by the application:

+ +

You can also set the response body + when you raise an error.

+ + """ % trace + + @cherrypy.expose + def toggleTracebacks(self): + # simple function to toggle tracebacks on and off + tracebacks = cherrypy.request.show_tracebacks + cherrypy.config.update({'request.show_tracebacks': not tracebacks}) + + # redirect back to the index + raise cherrypy.HTTPRedirect('/') + + @cherrypy.expose + def error(self, code): + # raise an error based on the get query + raise cherrypy.HTTPError(status=code) + + @cherrypy.expose + def messageArg(self): + message = ("If you construct an HTTPError with a 'message' " + "argument, it wil be placed on the error page " + "(underneath the status line by default).") + raise cherrypy.HTTPError(500, message=message) + + +import os.path +tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf') + +if __name__ == '__main__': + # CherryPy always starts with app.root when trying to map request URIs + # to objects, so we need to mount a request handler root. A request + # to '/' will be mapped to HelloWorld().index(). + cherrypy.quickstart(HTTPErrorDemo(), config=tutconf) diff --git a/deps/cherrypy/wsgiserver/__init__.py b/deps/cherrypy/wsgiserver/__init__.py new file mode 100644 index 00000000..b8530d96 --- /dev/null +++ b/deps/cherrypy/wsgiserver/__init__.py @@ -0,0 +1,2569 @@ +"""A high-speed, production ready, thread pooled, generic HTTP server. 
+ +Simplest example on how to use this module directly +(without using CherryPy's application machinery):: + + from cherrypy import wsgiserver + + def my_crazy_app(environ, start_response): + status = '200 OK' + response_headers = [('Content-type','text/plain')] + start_response(status, response_headers) + return ['Hello world!'] + + server = wsgiserver.CherryPyWSGIServer( + ('0.0.0.0', 8070), my_crazy_app, + server_name='www.cherrypy.example') + server.start() + +The CherryPy WSGI server can serve as many WSGI applications +as you want in one instance by using a WSGIPathInfoDispatcher:: + + d = WSGIPathInfoDispatcher({'/': my_crazy_app, '/blog': my_blog_app}) + server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 80), d) + +Want SSL support? Just set server.ssl_adapter to an SSLAdapter instance. + +This won't call the CherryPy engine (application side) at all, only the +HTTP server, which is independent from the rest of CherryPy. Don't +let the name "CherryPyWSGIServer" throw you; the name merely reflects +its origin, not its coupling. + +For those of you wanting to understand internals of this module, here's the +basic call flow. The server's listening thread runs a very tight loop, +sticking incoming connections onto a Queue:: + + server = CherryPyWSGIServer(...) + server.start() + while True: + tick() + # This blocks until a request comes in: + child = socket.accept() + conn = HTTPConnection(child, ...) + server.requests.put(conn) + +Worker threads are kept in a pool and poll the Queue, popping off and then +handling each connection in turn. Each connection can consist of an arbitrary +number of requests and their responses, so we run a nested loop:: + + while True: + conn = server.requests.get() + conn.communicate() + -> while True: + req = HTTPRequest(...) + req.parse_request() + -> # Read the Request-Line, e.g. "GET /page HTTP/1.1" + req.rfile.readline() + read_headers(req.rfile, req.inheaders) + req.respond() + -> response = app(...) 
+ try: + for chunk in response: + if chunk: + req.write(chunk) + finally: + if hasattr(response, "close"): + response.close() + if req.close_connection: + return +""" + +__all__ = ['HTTPRequest', 'HTTPConnection', 'HTTPServer', + 'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile', + 'CP_makefile', + 'MaxSizeExceeded', 'NoSSLError', 'FatalSSLAlert', + 'WorkerThread', 'ThreadPool', 'SSLAdapter', + 'CherryPyWSGIServer', + 'Gateway', 'WSGIGateway', 'WSGIGateway_10', 'WSGIGateway_u0', + 'WSGIPathInfoDispatcher', 'get_ssl_adapter_class', + 'socket_errors_to_ignore'] + +import os +try: + import queue +except: + import Queue as queue +import re +import email.utils +import socket +import sys +import threading +import time +import traceback as traceback_ +try: + from urllib.parse import unquote_to_bytes, urlparse +except ImportError: + from urlparse import unquote as unquote_to_bytes + from urlparse import urlparse +import errno +import logging + +import six +from six.moves import filter + +try: + # prefer slower Python-based io module + import _pyio as io +except ImportError: + # Python 2.6 + import io + +try: + import pkg_resources +except ImportError: + pass + +if 'win' in sys.platform and hasattr(socket, "AF_INET6"): + if not hasattr(socket, 'IPPROTO_IPV6'): + socket.IPPROTO_IPV6 = 41 + if not hasattr(socket, 'IPV6_V6ONLY'): + socket.IPV6_V6ONLY = 27 + + +DEFAULT_BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE + + +try: + cp_version = pkg_resources.require('cherrypy')[0].version +except Exception: + cp_version = 'unknown' + + +if six.PY3: + def ntob(n, encoding='ISO-8859-1'): + """Return the given native string as a byte string in the given + encoding. + """ + # In Python 3, the native string type is unicode + return n.encode(encoding) + + def bton(b, encoding='ISO-8859-1'): + return b.decode(encoding) +else: + def ntob(n, encoding='ISO-8859-1'): + """Return the given native string as a byte string in the given + encoding. + """ + # In Python 2, the native string type is bytes. 
Assume it's already + # in the given encoding, which for ISO-8859-1 is almost always what + # was intended. + return n + + def bton(b, encoding='ISO-8859-1'): + return b + + +LF = ntob('\n') +CRLF = ntob('\r\n') +TAB = ntob('\t') +SPACE = ntob(' ') +COLON = ntob(':') +SEMICOLON = ntob(';') +EMPTY = ntob('') +NUMBER_SIGN = ntob('#') +QUESTION_MARK = ntob('?') +ASTERISK = ntob('*') +FORWARD_SLASH = ntob('/') +quoted_slash = re.compile(ntob("(?i)%2F")) + + +def plat_specific_errors(*errnames): + """Return error numbers for all errors in errnames on this platform. + + The 'errno' module contains different global constants depending on + the specific platform (OS). This function will return the list of + numeric values for a given list of potential names. + """ + errno_names = dir(errno) + nums = [getattr(errno, k) for k in errnames if k in errno_names] + # de-dupe the list + return list(dict.fromkeys(nums).keys()) + +socket_error_eintr = plat_specific_errors("EINTR", "WSAEINTR") + +socket_errors_to_ignore = plat_specific_errors( + "EPIPE", + "EBADF", "WSAEBADF", + "ENOTSOCK", "WSAENOTSOCK", + "ETIMEDOUT", "WSAETIMEDOUT", + "ECONNREFUSED", "WSAECONNREFUSED", + "ECONNRESET", "WSAECONNRESET", + "ECONNABORTED", "WSAECONNABORTED", + "ENETRESET", "WSAENETRESET", + "EHOSTDOWN", "EHOSTUNREACH", +) +socket_errors_to_ignore.append("timed out") +socket_errors_to_ignore.append("The read operation timed out") +if sys.platform == 'darwin': + socket_errors_to_ignore.append(plat_specific_errors("EPROTOTYPE")) + +socket_errors_nonblocking = plat_specific_errors( + 'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK') + +comma_separated_headers = [ + ntob(h) for h in + ['Accept', 'Accept-Charset', 'Accept-Encoding', + 'Accept-Language', 'Accept-Ranges', 'Allow', 'Cache-Control', + 'Connection', 'Content-Encoding', 'Content-Language', 'Expect', + 'If-Match', 'If-None-Match', 'Pragma', 'Proxy-Authenticate', 'TE', + 'Trailer', 'Transfer-Encoding', 'Upgrade', 'Vary', 'Via', 'Warning', + 
'WWW-Authenticate'] +] + + +if not hasattr(logging, 'statistics'): + logging.statistics = {} + + +def read_headers(rfile, hdict=None): + """Read headers from the given stream into the given header dict. + + If hdict is None, a new header dict is created. Returns the populated + header dict. + + Headers which are repeated are folded together using a comma if their + specification so dictates. + + This function raises ValueError when the read bytes violate the HTTP spec. + You should probably return "400 Bad Request" if this happens. + """ + if hdict is None: + hdict = {} + + while True: + line = rfile.readline() + if not line: + # No more data--illegal end of headers + raise ValueError("Illegal end of headers.") + + if line == CRLF: + # Normal end of headers + break + if not line.endswith(CRLF): + raise ValueError("HTTP requires CRLF terminators") + + if line[0] in (SPACE, TAB): + # It's a continuation line. + v = line.strip() + else: + try: + k, v = line.split(COLON, 1) + except ValueError: + raise ValueError("Illegal header line.") + # TODO: what about TE and WWW-Authenticate? + k = k.strip().title() + v = v.strip() + hname = k + + if k in comma_separated_headers: + existing = hdict.get(hname) + if existing: + v = b", ".join((existing, v)) + hdict[hname] = v + + return hdict + + +class MaxSizeExceeded(Exception): + pass + + +class SizeCheckWrapper(object): + + """Wraps a file-like object, raising MaxSizeExceeded if too large.""" + + def __init__(self, rfile, maxlen): + self.rfile = rfile + self.maxlen = maxlen + self.bytes_read = 0 + + def _check_length(self): + if self.maxlen and self.bytes_read > self.maxlen: + raise MaxSizeExceeded() + + def read(self, size=None): + data = self.rfile.read(size) + self.bytes_read += len(data) + self._check_length() + return data + + def readline(self, size=None): + if size is not None: + data = self.rfile.readline(size) + self.bytes_read += len(data) + self._check_length() + return data + + # User didn't specify a size ... 
+ # We read the line in chunks to make sure it's not a 100MB line ! + res = [] + while True: + data = self.rfile.readline(256) + self.bytes_read += len(data) + self._check_length() + res.append(data) + # See https://github.com/cherrypy/cherrypy/issues/421 + if len(data) < 256 or data[-1:] == LF: + return EMPTY.join(res) + + def readlines(self, sizehint=0): + # Shamelessly stolen from StringIO + total = 0 + lines = [] + line = self.readline() + while line: + lines.append(line) + total += len(line) + if 0 < sizehint <= total: + break + line = self.readline() + return lines + + def close(self): + self.rfile.close() + + def __iter__(self): + return self + + def __next__(self): + data = next(self.rfile) + self.bytes_read += len(data) + self._check_length() + return data + + def next(self): + data = self.rfile.next() + self.bytes_read += len(data) + self._check_length() + return data + + +class KnownLengthRFile(object): + + """Wraps a file-like object, returning an empty string when exhausted.""" + + def __init__(self, rfile, content_length): + self.rfile = rfile + self.remaining = content_length + + def read(self, size=None): + if self.remaining == 0: + return b'' + if size is None: + size = self.remaining + else: + size = min(size, self.remaining) + + data = self.rfile.read(size) + self.remaining -= len(data) + return data + + def readline(self, size=None): + if self.remaining == 0: + return b'' + if size is None: + size = self.remaining + else: + size = min(size, self.remaining) + + data = self.rfile.readline(size) + self.remaining -= len(data) + return data + + def readlines(self, sizehint=0): + # Shamelessly stolen from StringIO + total = 0 + lines = [] + line = self.readline(sizehint) + while line: + lines.append(line) + total += len(line) + if 0 < sizehint <= total: + break + line = self.readline(sizehint) + return lines + + def close(self): + self.rfile.close() + + def __iter__(self): + return self + + def __next__(self): + data = next(self.rfile) + 
self.remaining -= len(data) + return data + + +class ChunkedRFile(object): + + """Wraps a file-like object, returning an empty string when exhausted. + + This class is intended to provide a conforming wsgi.input value for + request entities that have been encoded with the 'chunked' transfer + encoding. + """ + + def __init__(self, rfile, maxlen, bufsize=8192): + self.rfile = rfile + self.maxlen = maxlen + self.bytes_read = 0 + self.buffer = EMPTY + self.bufsize = bufsize + self.closed = False + + def _fetch(self): + if self.closed: + return + + line = self.rfile.readline() + self.bytes_read += len(line) + + if self.maxlen and self.bytes_read > self.maxlen: + raise MaxSizeExceeded("Request Entity Too Large", self.maxlen) + + line = line.strip().split(SEMICOLON, 1) + + try: + chunk_size = line.pop(0) + chunk_size = int(chunk_size, 16) + except ValueError: + raise ValueError("Bad chunked transfer size: " + repr(chunk_size)) + + if chunk_size <= 0: + self.closed = True + return + +## if line: chunk_extension = line[0] + + if self.maxlen and self.bytes_read + chunk_size > self.maxlen: + raise IOError("Request Entity Too Large") + + chunk = self.rfile.read(chunk_size) + self.bytes_read += len(chunk) + self.buffer += chunk + + crlf = self.rfile.read(2) + if crlf != CRLF: + raise ValueError( + "Bad chunked transfer coding (expected '\\r\\n', " + "got " + repr(crlf) + ")") + + def read(self, size=None): + data = EMPTY + while True: + if size and len(data) >= size: + return data + + if not self.buffer: + self._fetch() + if not self.buffer: + # EOF + return data + + if size: + remaining = size - len(data) + data += self.buffer[:remaining] + self.buffer = self.buffer[remaining:] + else: + data += self.buffer + + def readline(self, size=None): + data = EMPTY + while True: + if size and len(data) >= size: + return data + + if not self.buffer: + self._fetch() + if not self.buffer: + # EOF + return data + + newline_pos = self.buffer.find(LF) + if size: + if newline_pos == -1: + 
remaining = size - len(data) + data += self.buffer[:remaining] + self.buffer = self.buffer[remaining:] + else: + remaining = min(size - len(data), newline_pos) + data += self.buffer[:remaining] + self.buffer = self.buffer[remaining:] + else: + if newline_pos == -1: + data += self.buffer + else: + data += self.buffer[:newline_pos] + self.buffer = self.buffer[newline_pos:] + + def readlines(self, sizehint=0): + # Shamelessly stolen from StringIO + total = 0 + lines = [] + line = self.readline(sizehint) + while line: + lines.append(line) + total += len(line) + if 0 < sizehint <= total: + break + line = self.readline(sizehint) + return lines + + def read_trailer_lines(self): + if not self.closed: + raise ValueError( + "Cannot read trailers until the request body has been read.") + + while True: + line = self.rfile.readline() + if not line: + # No more data--illegal end of headers + raise ValueError("Illegal end of headers.") + + self.bytes_read += len(line) + if self.maxlen and self.bytes_read > self.maxlen: + raise IOError("Request Entity Too Large") + + if line == CRLF: + # Normal end of headers + break + if not line.endswith(CRLF): + raise ValueError("HTTP requires CRLF terminators") + + yield line + + def close(self): + self.rfile.close() + + +class HTTPRequest(object): + + """An HTTP Request (and response). + + A single HTTP connection may consist of multiple request/response pairs. + """ + + server = None + """The HTTPServer object which is receiving this request.""" + + conn = None + """The HTTPConnection object on which this request connected.""" + + inheaders = {} + """A dict of request headers.""" + + outheaders = [] + """A list of header tuples to write in the response.""" + + ready = False + """When True, the request has been parsed and is ready to begin generating + the response. 
When False, signals the calling Connection that the response + should not be generated and the connection should close.""" + + close_connection = False + """Signals the calling Connection that the request should close. This does + not imply an error! The client and/or server may each request that the + connection be closed.""" + + chunked_write = False + """If True, output will be encoded with the "chunked" transfer-coding. + + This value is set automatically inside send_headers.""" + + def __init__(self, server, conn): + self.server = server + self.conn = conn + + self.ready = False + self.started_request = False + self.scheme = ntob("http") + if self.server.ssl_adapter is not None: + self.scheme = ntob("https") + # Use the lowest-common protocol in case read_request_line errors. + self.response_protocol = 'HTTP/1.0' + self.inheaders = {} + + self.status = "" + self.outheaders = [] + self.sent_headers = False + self.close_connection = self.__class__.close_connection + self.chunked_read = False + self.chunked_write = self.__class__.chunked_write + + def parse_request(self): + """Parse the next HTTP request start-line and message-headers.""" + self.rfile = SizeCheckWrapper(self.conn.rfile, + self.server.max_request_header_size) + try: + success = self.read_request_line() + except MaxSizeExceeded: + self.simple_response( + "414 Request-URI Too Long", + "The Request-URI sent with the request exceeds the maximum " + "allowed bytes.") + return + else: + if not success: + return + + try: + success = self.read_request_headers() + except MaxSizeExceeded: + self.simple_response( + "413 Request Entity Too Large", + "The headers sent with the request exceed the maximum " + "allowed bytes.") + return + else: + if not success: + return + + self.ready = True + + def read_request_line(self): + # HTTP/1.1 connections are persistent by default. If a client + # requests a page, then idles (leaves the connection open), + # then rfile.readline() will raise socket.error("timed out"). 
+ # Note that it does this based on the value given to settimeout(), + # and doesn't need the client to request or acknowledge the close + # (although your TCP stack might suffer for it: cf Apache's history + # with FIN_WAIT_2). + request_line = self.rfile.readline() + + # Set started_request to True so communicate() knows to send 408 + # from here on out. + self.started_request = True + if not request_line: + return False + + if request_line == CRLF: + # RFC 2616 sec 4.1: "...if the server is reading the protocol + # stream at the beginning of a message and receives a CRLF + # first, it should ignore the CRLF." + # But only ignore one leading line! else we enable a DoS. + request_line = self.rfile.readline() + if not request_line: + return False + + if not request_line.endswith(CRLF): + self.simple_response( + "400 Bad Request", "HTTP requires CRLF terminators") + return False + + try: + method, uri, req_protocol = request_line.strip().split(SPACE, 2) + req_protocol_str = req_protocol.decode('ascii') + rp = int(req_protocol_str[5]), int(req_protocol_str[7]) + except (ValueError, IndexError): + self.simple_response("400 Bad Request", "Malformed Request-Line") + return False + + self.uri = uri + self.method = method + + # uri may be an abs_path (including "http://host.domain.tld"); + scheme, authority, path = self.parse_request_uri(uri) + if path is None: + self.simple_response("400 Bad Request", + "Invalid path in Request-URI.") + return False + if NUMBER_SIGN in path: + self.simple_response("400 Bad Request", + "Illegal #fragment in Request-URI.") + return False + + if scheme: + self.scheme = scheme + + qs = EMPTY + if QUESTION_MARK in path: + path, qs = path.split(QUESTION_MARK, 1) + + # Unquote the path+params (e.g. "/this%20path" -> "/this path"). 
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2 + # + # But note that "...a URI must be separated into its components + # before the escaped characters within those components can be + # safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2 + # Therefore, "/this%2Fpath" becomes "/this%2Fpath", not "/this/path". + try: + atoms = [unquote_to_bytes(x) for x in quoted_slash.split(path)] + except ValueError: + ex = sys.exc_info()[1] + self.simple_response("400 Bad Request", ex.args[0]) + return False + path = b"%2F".join(atoms) + self.path = path + + # Note that, like wsgiref and most other HTTP servers, + # we "% HEX HEX"-unquote the path but not the query string. + self.qs = qs + + # Compare request and server HTTP protocol versions, in case our + # server does not support the requested protocol. Limit our output + # to min(req, server). We want the following output: + # request server actual written supported response + # protocol protocol response protocol feature set + # a 1.0 1.0 1.0 1.0 + # b 1.0 1.1 1.1 1.0 + # c 1.1 1.0 1.0 1.0 + # d 1.1 1.1 1.1 1.1 + # Notice that, in (b), the response will be "HTTP/1.1" even though + # the client only understands 1.0. RFC 2616 10.5.6 says we should + # only return 505 if the _major_ version is different. + sp = int(self.server.protocol[5]), int(self.server.protocol[7]) + + if sp[0] != rp[0]: + self.simple_response("505 HTTP Version Not Supported") + return False + + self.request_protocol = req_protocol + self.response_protocol = "HTTP/%s.%s" % min(rp, sp) + + return True + + def read_request_headers(self): + """Read self.rfile into self.inheaders. 
Return success.""" + + # then all the http headers + try: + read_headers(self.rfile, self.inheaders) + except ValueError: + ex = sys.exc_info()[1] + self.simple_response("400 Bad Request", ex.args[0]) + return False + + mrbs = self.server.max_request_body_size + if mrbs and int(self.inheaders.get(b"Content-Length", 0)) > mrbs: + self.simple_response( + "413 Request Entity Too Large", + "The entity sent with the request exceeds the maximum " + "allowed bytes.") + return False + + # Persistent connection support + if self.response_protocol == "HTTP/1.1": + # Both server and client are HTTP/1.1 + if self.inheaders.get(b"Connection", b"") == b"close": + self.close_connection = True + else: + # Either the server or client (or both) are HTTP/1.0 + if self.inheaders.get(b"Connection", b"") != b"Keep-Alive": + self.close_connection = True + + # Transfer-Encoding support + te = None + if self.response_protocol == "HTTP/1.1": + te = self.inheaders.get(b"Transfer-Encoding") + if te: + te = [x.strip().lower() for x in te.split(b",") if x.strip()] + + self.chunked_read = False + + if te: + for enc in te: + if enc == b"chunked": + self.chunked_read = True + else: + # Note that, even if we see "chunked", we must reject + # if there is an extension we don't recognize. + self.simple_response("501 Unimplemented") + self.close_connection = True + return False + + # From PEP 333: + # "Servers and gateways that implement HTTP 1.1 must provide + # transparent support for HTTP 1.1's "expect/continue" mechanism. + # This may be done in any of several ways: + # 1. Respond to requests containing an Expect: 100-continue request + # with an immediate "100 Continue" response, and proceed normally. + # 2. Proceed with the request normally, but provide the application + # with a wsgi.input stream that will send the "100 Continue" + # response if/when the application first attempts to read from + # the input stream. The read request must then remain blocked + # until the client responds. + # 3. 
Wait until the client decides that the server does not support + # expect/continue, and sends the request body on its own. + # (This is suboptimal, and is not recommended.) + # + # We used to do 3, but are now doing 1. Maybe we'll do 2 someday, + # but it seems like it would be a big slowdown for such a rare case. + if self.inheaders.get(b"Expect", b"") == b"100-continue": + # Don't use simple_response here, because it emits headers + # we don't want. See + # https://github.com/cherrypy/cherrypy/issues/951 + msg = self.server.protocol.encode('ascii') + msg += b" 100 Continue\r\n\r\n" + try: + self.conn.wfile.write(msg) + except socket.error: + x = sys.exc_info()[1] + if x.args[0] not in socket_errors_to_ignore: + raise + return True + + def parse_request_uri(self, uri): + """Parse a Request-URI into (scheme, authority, path). + + Note that Request-URI's must be one of:: + + Request-URI = "*" | absoluteURI | abs_path | authority + + Therefore, a Request-URI which starts with a double forward-slash + cannot be a "net_path":: + + net_path = "//" authority [ abs_path ] + + Instead, it must be interpreted as an "abs_path" with an empty first + path segment:: + + abs_path = "/" path_segments + path_segments = segment *( "/" segment ) + segment = *pchar *( ";" param ) + param = *pchar + """ + if uri == ASTERISK: + return None, None, uri + + scheme, authority, path, params, query, fragment = urlparse(uri) + if scheme and QUESTION_MARK not in scheme: + # An absoluteURI. + # If there's a scheme (and it must be http or https), then: + # http_URL = "http:" "//" host [ ":" port ] [ abs_path [ "?" query + # ]] + return scheme, authority, path + + if uri.startswith(FORWARD_SLASH): + # An abs_path. + return None, None, uri + else: + # An authority. 
+ return None, uri, None + + def respond(self): + """Call the gateway and write its iterable output.""" + mrbs = self.server.max_request_body_size + if self.chunked_read: + self.rfile = ChunkedRFile(self.conn.rfile, mrbs) + else: + cl = int(self.inheaders.get(b"Content-Length", 0)) + if mrbs and mrbs < cl: + if not self.sent_headers: + self.simple_response( + "413 Request Entity Too Large", + "The entity sent with the request exceeds the " + "maximum allowed bytes.") + return + self.rfile = KnownLengthRFile(self.conn.rfile, cl) + + self.server.gateway(self).respond() + + if (self.ready and not self.sent_headers): + self.sent_headers = True + self.send_headers() + if self.chunked_write: + self.conn.wfile.write(b"0\r\n\r\n") + + def simple_response(self, status, msg=""): + """Write a simple response back to the client.""" + status = str(status) + proto_status = "%s %s\r\n" % (self.server.protocol, status) + content_length = "Content-Length: %s\r\n" % len(msg) + content_type = "Content-Type: text/plain\r\n" + buf = [ + proto_status.encode("ISO-8859-1"), + content_length.encode("ISO-8859-1"), + content_type.encode("ISO-8859-1"), + ] + + if status[:3] in ("413", "414"): + # Request Entity Too Large / Request-URI Too Long + self.close_connection = True + if self.response_protocol == 'HTTP/1.1': + # This will not be true for 414, since read_request_line + # usually raises 414 before reading the whole line, and we + # therefore cannot know the proper response_protocol. + buf.append(b"Connection: close\r\n") + else: + # HTTP/1.0 had no 413/414 status nor Connection header. + # Emit 400 instead and trust the message body is enough. 
+ status = "400 Bad Request" + + buf.append(CRLF) + if msg: + if isinstance(msg, six.text_type): + msg = msg.encode("ISO-8859-1") + buf.append(msg) + + try: + self.conn.wfile.write(EMPTY.join(buf)) + except socket.error: + x = sys.exc_info()[1] + if x.args[0] not in socket_errors_to_ignore: + raise + + def write(self, chunk): + """Write unbuffered data to the client.""" + if self.chunked_write and chunk: + chunk_size_hex = hex(len(chunk))[2:].encode('ascii') + buf = [chunk_size_hex, CRLF, chunk, CRLF] + self.conn.wfile.write(EMPTY.join(buf)) + else: + self.conn.wfile.write(chunk) + + def send_headers(self): + """Assert, process, and send the HTTP response message-headers. + + You must set self.status, and self.outheaders before calling this. + """ + hkeys = [key.lower() for key, value in self.outheaders] + status = int(self.status[:3]) + + if status == 413: + # Request Entity Too Large. Close conn to avoid garbage. + self.close_connection = True + elif b"content-length" not in hkeys: + # "All 1xx (informational), 204 (no content), + # and 304 (not modified) responses MUST NOT + # include a message-body." So no point chunking. + if status < 200 or status in (204, 205, 304): + pass + else: + if (self.response_protocol == 'HTTP/1.1' + and self.method != b'HEAD'): + # Use the chunked transfer-coding + self.chunked_write = True + self.outheaders.append((b"Transfer-Encoding", b"chunked")) + else: + # Closing the conn is the only way to determine len. + self.close_connection = True + + if b"connection" not in hkeys: + if self.response_protocol == 'HTTP/1.1': + # Both server and client are HTTP/1.1 or better + if self.close_connection: + self.outheaders.append((b"Connection", b"close")) + else: + # Server and/or client are HTTP/1.0 + if not self.close_connection: + self.outheaders.append((b"Connection", b"Keep-Alive")) + + if (not self.close_connection) and (not self.chunked_read): + # Read any remaining request body data on the socket. 
+ # "If an origin server receives a request that does not include an + # Expect request-header field with the "100-continue" expectation, + # the request includes a request body, and the server responds + # with a final status code before reading the entire request body + # from the transport connection, then the server SHOULD NOT close + # the transport connection until it has read the entire request, + # or until the client closes the connection. Otherwise, the client + # might not reliably receive the response message. However, this + # requirement is not be construed as preventing a server from + # defending itself against denial-of-service attacks, or from + # badly broken client implementations." + remaining = getattr(self.rfile, 'remaining', 0) + if remaining > 0: + self.rfile.read(remaining) + + if b"date" not in hkeys: + self.outheaders.append(( + b"Date", + email.utils.formatdate(usegmt=True).encode('ISO-8859-1'), + )) + + if b"server" not in hkeys: + self.outheaders.append(( + b"Server", + self.server.server_name.encode('ISO-8859-1'), + )) + + proto = self.server.protocol.encode('ascii') + buf = [proto + SPACE + self.status + CRLF] + for k, v in self.outheaders: + buf.append(k + COLON + SPACE + v + CRLF) + buf.append(CRLF) + self.conn.wfile.write(EMPTY.join(buf)) + + +class NoSSLError(Exception): + + """Exception raised when a client speaks HTTP to an HTTPS socket.""" + pass + + +class FatalSSLAlert(Exception): + + """Exception raised when the SSL implementation signals a fatal alert.""" + pass + + +class CP_BufferedWriter(io.BufferedWriter): + + """Faux file object attached to a socket object.""" + + def write(self, b): + self._checkClosed() + if isinstance(b, str): + raise TypeError("can't write str to binary stream") + + with self._write_lock: + self._write_buf.extend(b) + self._flush_unlocked() + return len(b) + + def _flush_unlocked(self): + self._checkClosed("flush of closed file") + while self._write_buf: + try: + # ssl sockets only except 
'bytes', not bytearrays + # so perhaps we should conditionally wrap this for perf? + n = self.raw.write(bytes(self._write_buf)) + except io.BlockingIOError as e: + n = e.characters_written + del self._write_buf[:n] + + +def CP_makefile_PY3(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE): + if 'r' in mode: + return io.BufferedReader(socket.SocketIO(sock, mode), bufsize) + else: + return CP_BufferedWriter(socket.SocketIO(sock, mode), bufsize) + + +class CP_makefile_PY2(getattr(socket, '_fileobject', object)): + + """Faux file object attached to a socket object.""" + + def __init__(self, *args, **kwargs): + self.bytes_read = 0 + self.bytes_written = 0 + socket._fileobject.__init__(self, *args, **kwargs) + + def write(self, data): + """Sendall for non-blocking sockets.""" + while data: + try: + bytes_sent = self.send(data) + data = data[bytes_sent:] + except socket.error as e: + if e.args[0] not in socket_errors_nonblocking: + raise + + def send(self, data): + bytes_sent = self._sock.send(data) + self.bytes_written += bytes_sent + return bytes_sent + + def flush(self): + if self._wbuf: + buffer = "".join(self._wbuf) + self._wbuf = [] + self.write(buffer) + + def recv(self, size): + while True: + try: + data = self._sock.recv(size) + self.bytes_read += len(data) + return data + except socket.error as e: + if (e.args[0] not in socket_errors_nonblocking + and e.args[0] not in socket_error_eintr): + raise + + class FauxSocket(object): + + """Faux socket with the minimal interface required by pypy""" + + def _reuse(self): + pass + + _fileobject_uses_str_type = six.PY2 and isinstance( + socket._fileobject(FauxSocket())._rbuf, six.string_types) + + # FauxSocket is no longer needed + del FauxSocket + + if not _fileobject_uses_str_type: + def read(self, size=-1): + # Use max, disallow tiny reads in a loop as they are very + # inefficient. + # We never leave read() with any leftover data from a new recv() + # call in our internal buffer. 
+ rbufsize = max(self._rbufsize, self.default_bufsize) + # Our use of StringIO rather than lists of string objects returned + # by recv() minimizes memory usage and fragmentation that occurs + # when rbufsize is large compared to the typical return value of + # recv(). + buf = self._rbuf + buf.seek(0, 2) # seek end + if size < 0: + # Read until EOF + # reset _rbuf. we consume it via buf. + self._rbuf = io.BytesIO() + while True: + data = self.recv(rbufsize) + if not data: + break + buf.write(data) + return buf.getvalue() + else: + # Read until size bytes or EOF seen, whichever comes first + buf_len = buf.tell() + if buf_len >= size: + # Already have size bytes in our buffer? Extract and + # return. + buf.seek(0) + rv = buf.read(size) + self._rbuf = io.BytesIO() + self._rbuf.write(buf.read()) + return rv + + # reset _rbuf. we consume it via buf. + self._rbuf = io.BytesIO() + while True: + left = size - buf_len + # recv() will malloc the amount of memory given as its + # parameter even though it often returns much less data + # than that. The returned data string is short lived + # as we copy it into a StringIO and free it. This avoids + # fragmentation issues on many platforms. + data = self.recv(left) + if not data: + break + n = len(data) + if n == size and not buf_len: + # Shortcut. Avoid buffer data copies when: + # - We have no data in our buffer. + # AND + # - Our call to recv returned exactly the + # number of bytes we were asked to read. 
+ return data + if n == left: + buf.write(data) + del data # explicit free + break + assert n <= left, "recv(%d) returned %d bytes" % (left, n) + buf.write(data) + buf_len += n + del data # explicit free + #assert buf_len == buf.tell() + return buf.getvalue() + + def readline(self, size=-1): + buf = self._rbuf + buf.seek(0, 2) # seek end + if buf.tell() > 0: + # check if we already have it in our buffer + buf.seek(0) + bline = buf.readline(size) + if bline.endswith('\n') or len(bline) == size: + self._rbuf = io.BytesIO() + self._rbuf.write(buf.read()) + return bline + del bline + if size < 0: + # Read until \n or EOF, whichever comes first + if self._rbufsize <= 1: + # Speed up unbuffered case + buf.seek(0) + buffers = [buf.read()] + # reset _rbuf. we consume it via buf. + self._rbuf = io.BytesIO() + data = None + recv = self.recv + while data != "\n": + data = recv(1) + if not data: + break + buffers.append(data) + return "".join(buffers) + + buf.seek(0, 2) # seek end + # reset _rbuf. we consume it via buf. + self._rbuf = io.BytesIO() + while True: + data = self.recv(self._rbufsize) + if not data: + break + nl = data.find('\n') + if nl >= 0: + nl += 1 + buf.write(data[:nl]) + self._rbuf.write(data[nl:]) + del data + break + buf.write(data) + return buf.getvalue() + else: + # Read until size bytes or \n or EOF seen, whichever comes + # first + buf.seek(0, 2) # seek end + buf_len = buf.tell() + if buf_len >= size: + buf.seek(0) + rv = buf.read(size) + self._rbuf = io.BytesIO() + self._rbuf.write(buf.read()) + return rv + # reset _rbuf. we consume it via buf. + self._rbuf = io.BytesIO() + while True: + data = self.recv(self._rbufsize) + if not data: + break + left = size - buf_len + # did we just receive a newline? + nl = data.find('\n', 0, left) + if nl >= 0: + nl += 1 + # save the excess data to _rbuf + self._rbuf.write(data[nl:]) + if buf_len: + buf.write(data[:nl]) + break + else: + # Shortcut. 
Avoid data copy through buf when + # returning a substring of our first recv(). + return data[:nl] + n = len(data) + if n == size and not buf_len: + # Shortcut. Avoid data copy through buf when + # returning exactly all of our first recv(). + return data + if n >= left: + buf.write(data[:left]) + self._rbuf.write(data[left:]) + break + buf.write(data) + buf_len += n + #assert buf_len == buf.tell() + return buf.getvalue() + else: + def read(self, size=-1): + if size < 0: + # Read until EOF + buffers = [self._rbuf] + self._rbuf = "" + if self._rbufsize <= 1: + recv_size = self.default_bufsize + else: + recv_size = self._rbufsize + + while True: + data = self.recv(recv_size) + if not data: + break + buffers.append(data) + return "".join(buffers) + else: + # Read until size bytes or EOF seen, whichever comes first + data = self._rbuf + buf_len = len(data) + if buf_len >= size: + self._rbuf = data[size:] + return data[:size] + buffers = [] + if data: + buffers.append(data) + self._rbuf = "" + while True: + left = size - buf_len + recv_size = max(self._rbufsize, left) + data = self.recv(recv_size) + if not data: + break + buffers.append(data) + n = len(data) + if n >= left: + self._rbuf = data[left:] + buffers[-1] = data[:left] + break + buf_len += n + return "".join(buffers) + + def readline(self, size=-1): + data = self._rbuf + if size < 0: + # Read until \n or EOF, whichever comes first + if self._rbufsize <= 1: + # Speed up unbuffered case + assert data == "" + buffers = [] + while data != "\n": + data = self.recv(1) + if not data: + break + buffers.append(data) + return "".join(buffers) + nl = data.find('\n') + if nl >= 0: + nl += 1 + self._rbuf = data[nl:] + return data[:nl] + buffers = [] + if data: + buffers.append(data) + self._rbuf = "" + while True: + data = self.recv(self._rbufsize) + if not data: + break + buffers.append(data) + nl = data.find('\n') + if nl >= 0: + nl += 1 + self._rbuf = data[nl:] + buffers[-1] = data[:nl] + break + return "".join(buffers) + 
else: + # Read until size bytes or \n or EOF seen, whichever comes + # first + nl = data.find('\n', 0, size) + if nl >= 0: + nl += 1 + self._rbuf = data[nl:] + return data[:nl] + buf_len = len(data) + if buf_len >= size: + self._rbuf = data[size:] + return data[:size] + buffers = [] + if data: + buffers.append(data) + self._rbuf = "" + while True: + data = self.recv(self._rbufsize) + if not data: + break + buffers.append(data) + left = size - buf_len + nl = data.find('\n', 0, left) + if nl >= 0: + nl += 1 + self._rbuf = data[nl:] + buffers[-1] = data[:nl] + break + n = len(data) + if n >= left: + self._rbuf = data[left:] + buffers[-1] = data[:left] + break + buf_len += n + return "".join(buffers) + + +CP_makefile = CP_makefile_PY2 if six.PY2 else CP_makefile_PY3 + + +class HTTPConnection(object): + + """An HTTP connection (active socket). + + server: the Server object which received this connection. + socket: the raw socket object (usually TCP) for this connection. + makefile: a fileobject class for reading from the socket. + """ + + remote_addr = None + remote_port = None + ssl_env = None + rbufsize = DEFAULT_BUFFER_SIZE + wbufsize = DEFAULT_BUFFER_SIZE + RequestHandlerClass = HTTPRequest + + def __init__(self, server, sock, makefile=CP_makefile): + self.server = server + self.socket = sock + self.rfile = makefile(sock, "rb", self.rbufsize) + self.wfile = makefile(sock, "wb", self.wbufsize) + self.requests_seen = 0 + + def communicate(self): + """Read each request and respond appropriately.""" + request_seen = False + try: + while True: + # (re)set req to None so that if something goes wrong in + # the RequestHandlerClass constructor, the error doesn't + # get written to the previous request. + req = None + req = self.RequestHandlerClass(self.server, self) + + # This order of operations should guarantee correct pipelining. 
+ req.parse_request() + if self.server.stats['Enabled']: + self.requests_seen += 1 + if not req.ready: + # Something went wrong in the parsing (and the server has + # probably already made a simple_response). Return and + # let the conn close. + return + + request_seen = True + req.respond() + if req.close_connection: + return + except socket.error: + e = sys.exc_info()[1] + errnum = e.args[0] + # sadly SSL sockets return a different (longer) time out string + if ( + errnum == 'timed out' or + errnum == 'The read operation timed out' + ): + # Don't error if we're between requests; only error + # if 1) no request has been started at all, or 2) we're + # in the middle of a request. + # See https://github.com/cherrypy/cherrypy/issues/853 + if (not request_seen) or (req and req.started_request): + # Don't bother writing the 408 if the response + # has already started being written. + if req and not req.sent_headers: + try: + req.simple_response("408 Request Timeout") + except FatalSSLAlert: + # Close the connection. + return + except NoSSLError: + self._handle_no_ssl() + elif errnum not in socket_errors_to_ignore: + self.server.error_log("socket.error %s" % repr(errnum), + level=logging.WARNING, traceback=True) + if req and not req.sent_headers: + try: + req.simple_response("500 Internal Server Error") + except FatalSSLAlert: + # Close the connection. + return + except NoSSLError: + self._handle_no_ssl() + return + except (KeyboardInterrupt, SystemExit): + raise + except FatalSSLAlert: + # Close the connection. + return + except NoSSLError: + self._handle_no_ssl(req) + except Exception: + e = sys.exc_info()[1] + self.server.error_log(repr(e), level=logging.ERROR, traceback=True) + if req and not req.sent_headers: + try: + req.simple_response("500 Internal Server Error") + except FatalSSLAlert: + # Close the connection. 
+ return + + linger = False + + def _handle_no_ssl(self, req): + if not req or req.sent_headers: + return + # Unwrap wfile + self.wfile = CP_makefile(self.socket._sock, "wb", self.wbufsize) + msg = ( + "The client sent a plain HTTP request, but " + "this server only speaks HTTPS on this port." + ) + req.simple_response("400 Bad Request", msg) + self.linger = True + + def close(self): + """Close the socket underlying this connection.""" + self.rfile.close() + + if not self.linger: + self._close_kernel_socket() + self.socket.close() + else: + # On the other hand, sometimes we want to hang around for a bit + # to make sure the client has a chance to read our entire + # response. Skipping the close() calls here delays the FIN + # packet until the socket object is garbage-collected later. + # Someday, perhaps, we'll do the full lingering_close that + # Apache does, but not today. + pass + + def _close_kernel_socket(self): + """ + On old Python versions, + Python's socket module does NOT call close on the kernel + socket when you call socket.close(). We do so manually here + because we want this server to send a FIN TCP segment + immediately. Note this must be called *before* calling + socket.close(), because the latter drops its reference to + the kernel socket. + """ + if six.PY2 and hasattr(self.socket, '_sock'): + self.socket._sock.close() + + +class TrueyZero(object): + + """An object which equals and does math like the integer 0 but evals True. + """ + + def __add__(self, other): + return other + + def __radd__(self, other): + return other +trueyzero = TrueyZero() + + +_SHUTDOWNREQUEST = None + + +class WorkerThread(threading.Thread): + + """Thread which continuously polls a Queue for Connection objects. + + Due to the timing issues of polling a Queue, a WorkerThread does not + check its own 'ready' flag after it has started. To stop the thread, + it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue + (one for each running WorkerThread). 
+ """ + + conn = None + """The current connection pulled off the Queue, or None.""" + + server = None + """The HTTP Server which spawned this thread, and which owns the + Queue and is placing active connections into it.""" + + ready = False + """A simple flag for the calling server to know when this thread + has begun polling the Queue.""" + + def __init__(self, server): + self.ready = False + self.server = server + + self.requests_seen = 0 + self.bytes_read = 0 + self.bytes_written = 0 + self.start_time = None + self.work_time = 0 + self.stats = { + 'Requests': lambda s: self.requests_seen + ( + (self.start_time is None) and + trueyzero or + self.conn.requests_seen + ), + 'Bytes Read': lambda s: self.bytes_read + ( + (self.start_time is None) and + trueyzero or + self.conn.rfile.bytes_read + ), + 'Bytes Written': lambda s: self.bytes_written + ( + (self.start_time is None) and + trueyzero or + self.conn.wfile.bytes_written + ), + 'Work Time': lambda s: self.work_time + ( + (self.start_time is None) and + trueyzero or + time.time() - self.start_time + ), + 'Read Throughput': lambda s: s['Bytes Read'](s) / ( + s['Work Time'](s) or 1e-6), + 'Write Throughput': lambda s: s['Bytes Written'](s) / ( + s['Work Time'](s) or 1e-6), + } + threading.Thread.__init__(self) + + def run(self): + self.server.stats['Worker Threads'][self.getName()] = self.stats + try: + self.ready = True + while True: + conn = self.server.requests.get() + if conn is _SHUTDOWNREQUEST: + return + + self.conn = conn + if self.server.stats['Enabled']: + self.start_time = time.time() + try: + conn.communicate() + finally: + conn.close() + if self.server.stats['Enabled']: + self.requests_seen += self.conn.requests_seen + self.bytes_read += self.conn.rfile.bytes_read + self.bytes_written += self.conn.wfile.bytes_written + self.work_time += time.time() - self.start_time + self.start_time = None + self.conn = None + except (KeyboardInterrupt, SystemExit): + exc = sys.exc_info()[1] + self.server.interrupt = 
exc + + +class ThreadPool(object): + + """A Request Queue for an HTTPServer which pools threads. + + ThreadPool objects must provide min, get(), put(obj), start() + and stop(timeout) attributes. + """ + + def __init__(self, server, min=10, max=-1, + accepted_queue_size=-1, accepted_queue_timeout=10): + self.server = server + self.min = min + self.max = max + self._threads = [] + self._queue = queue.Queue(maxsize=accepted_queue_size) + self._queue_put_timeout = accepted_queue_timeout + self.get = self._queue.get + + def start(self): + """Start the pool of threads.""" + for i in range(self.min): + self._threads.append(WorkerThread(self.server)) + for worker in self._threads: + worker.setName("CP Server " + worker.getName()) + worker.start() + for worker in self._threads: + while not worker.ready: + time.sleep(.1) + + def _get_idle(self): + """Number of worker threads which are idle. Read-only.""" + return len([t for t in self._threads if t.conn is None]) + idle = property(_get_idle, doc=_get_idle.__doc__) + + def put(self, obj): + self._queue.put(obj, block=True, timeout=self._queue_put_timeout) + if obj is _SHUTDOWNREQUEST: + return + + def grow(self, amount): + """Spawn new worker threads (not above self.max).""" + if self.max > 0: + budget = max(self.max - len(self._threads), 0) + else: + # self.max <= 0 indicates no maximum + budget = float('inf') + + n_new = min(amount, budget) + + workers = [self._spawn_worker() for i in range(n_new)] + while not all(worker.ready for worker in workers): + time.sleep(.1) + self._threads.extend(workers) + + def _spawn_worker(self): + worker = WorkerThread(self.server) + worker.setName("CP Server " + worker.getName()) + worker.start() + return worker + + def shrink(self, amount): + """Kill off worker threads (not below self.min).""" + # Grow/shrink the pool if necessary. 
+ # Remove any dead threads from our list + for t in self._threads: + if not t.isAlive(): + self._threads.remove(t) + amount -= 1 + + # calculate the number of threads above the minimum + n_extra = max(len(self._threads) - self.min, 0) + + # don't remove more than amount + n_to_remove = min(amount, n_extra) + + # put shutdown requests on the queue equal to the number of threads + # to remove. As each request is processed by a worker, that worker + # will terminate and be culled from the list. + for n in range(n_to_remove): + self._queue.put(_SHUTDOWNREQUEST) + + def stop(self, timeout=5): + # Must shut down threads here so the code that calls + # this method can know when all threads are stopped. + for worker in self._threads: + self._queue.put(_SHUTDOWNREQUEST) + + # Don't join currentThread (when stop is called inside a request). + current = threading.currentThread() + if timeout and timeout >= 0: + endtime = time.time() + timeout + while self._threads: + worker = self._threads.pop() + if worker is not current and worker.isAlive(): + try: + if timeout is None or timeout < 0: + worker.join() + else: + remaining_time = endtime - time.time() + if remaining_time > 0: + worker.join(remaining_time) + if worker.isAlive(): + # We exhausted the timeout. + # Forcibly shut down the socket. + c = worker.conn + if c and not c.rfile.closed: + try: + c.socket.shutdown(socket.SHUT_RD) + except TypeError: + # pyOpenSSL sockets don't take an arg + c.socket.shutdown() + worker.join() + except (AssertionError, + # Ignore repeated Ctrl-C. + # See + # https://github.com/cherrypy/cherrypy/issues/691. 
+ KeyboardInterrupt): + pass + + def _get_qsize(self): + return self._queue.qsize() + qsize = property(_get_qsize) + + +try: + import fcntl +except ImportError: + try: + from ctypes import windll, WinError + import ctypes.wintypes + _SetHandleInformation = windll.kernel32.SetHandleInformation + _SetHandleInformation.argtypes = [ + ctypes.wintypes.HANDLE, + ctypes.wintypes.DWORD, + ctypes.wintypes.DWORD, + ] + _SetHandleInformation.restype = ctypes.wintypes.BOOL + except ImportError: + def prevent_socket_inheritance(sock): + """Dummy function, since neither fcntl nor ctypes are available.""" + pass + else: + def prevent_socket_inheritance(sock): + """Mark the given socket fd as non-inheritable (Windows).""" + if not _SetHandleInformation(sock.fileno(), 1, 0): + raise WinError() +else: + def prevent_socket_inheritance(sock): + """Mark the given socket fd as non-inheritable (POSIX).""" + fd = sock.fileno() + old_flags = fcntl.fcntl(fd, fcntl.F_GETFD) + fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC) + + +class SSLAdapter(object): + + """Base class for SSL driver library adapters. + + Required methods: + + * ``wrap(sock) -> (wrapped socket, ssl environ dict)`` + * ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) -> + socket file object`` + """ + + def __init__(self, certificate, private_key, certificate_chain=None): + self.certificate = certificate + self.private_key = private_key + self.certificate_chain = certificate_chain + + def wrap(self, sock): + raise NotImplemented + + def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE): + raise NotImplemented + + +class HTTPServer(object): + + """An HTTP server.""" + + _bind_addr = "127.0.0.1" + _interrupt = None + + gateway = None + """A Gateway instance.""" + + minthreads = None + """The minimum number of worker threads to create (default 10).""" + + maxthreads = None + """The maximum number of worker threads to create (default -1 = no limit). 
+ """ + + server_name = None + """The name of the server; defaults to socket.gethostname().""" + + protocol = "HTTP/1.1" + """The version string to write in the Status-Line of all HTTP responses. + + For example, "HTTP/1.1" is the default. This also limits the supported + features used in the response.""" + + request_queue_size = 5 + """The 'backlog' arg to socket.listen(); max queued connections + (default 5). + """ + + shutdown_timeout = 5 + """The total time, in seconds, to wait for worker threads to cleanly exit. + """ + + timeout = 10 + """The timeout in seconds for accepted connections (default 10).""" + + version = "CherryPy/" + cp_version + """A version string for the HTTPServer.""" + + software = None + """The value to set for the SERVER_SOFTWARE entry in the WSGI environ. + + If None, this defaults to ``'%s Server' % self.version``.""" + + ready = False + """An internal flag which marks whether the socket is accepting + connections. + """ + + max_request_header_size = 0 + """The maximum size, in bytes, for request headers, or 0 for no limit.""" + + max_request_body_size = 0 + """The maximum size, in bytes, for request bodies, or 0 for no limit.""" + + nodelay = True + """If True (the default since 3.1), sets the TCP_NODELAY socket option.""" + + ConnectionClass = HTTPConnection + """The class to use for handling HTTP connections.""" + + ssl_adapter = None + """An instance of SSLAdapter (or a subclass). 
+ + You must have the corresponding SSL driver library installed.""" + + def __init__(self, bind_addr, gateway, minthreads=10, maxthreads=-1, + server_name=None): + self.bind_addr = bind_addr + self.gateway = gateway + + self.requests = ThreadPool(self, min=minthreads or 1, max=maxthreads) + + if not server_name: + server_name = socket.gethostname() + self.server_name = server_name + self.clear_stats() + + def clear_stats(self): + self._start_time = None + self._run_time = 0 + self.stats = { + 'Enabled': False, + 'Bind Address': lambda s: repr(self.bind_addr), + 'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(), + 'Accepts': 0, + 'Accepts/sec': lambda s: s['Accepts'] / self.runtime(), + 'Queue': lambda s: getattr(self.requests, "qsize", None), + 'Threads': lambda s: len(getattr(self.requests, "_threads", [])), + 'Threads Idle': lambda s: getattr(self.requests, "idle", None), + 'Socket Errors': 0, + 'Requests': lambda s: (not s['Enabled']) and -1 or sum( + [w['Requests'](w) for w in s['Worker Threads'].values()], 0), + 'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum( + [w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0), + 'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum( + [w['Bytes Written'](w) for w in s['Worker Threads'].values()], + 0), + 'Work Time': lambda s: (not s['Enabled']) and -1 or sum( + [w['Work Time'](w) for w in s['Worker Threads'].values()], 0), + 'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum( + [w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6) + for w in s['Worker Threads'].values()], 0), + 'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum( + [w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6) + for w in s['Worker Threads'].values()], 0), + 'Worker Threads': {}, + } + logging.statistics["CherryPy HTTPServer %d" % id(self)] = self.stats + + def runtime(self): + if self._start_time is None: + return self._run_time + else: + return self._run_time + (time.time() - 
self._start_time) + + def __str__(self): + return "%s.%s(%r)" % (self.__module__, self.__class__.__name__, + self.bind_addr) + + def _get_bind_addr(self): + return self._bind_addr + + def _set_bind_addr(self, value): + if isinstance(value, tuple) and value[0] in ('', None): + # Despite the socket module docs, using '' does not + # allow AI_PASSIVE to work. Passing None instead + # returns '0.0.0.0' like we want. In other words: + # host AI_PASSIVE result + # '' Y 192.168.x.y + # '' N 192.168.x.y + # None Y 0.0.0.0 + # None N 127.0.0.1 + # But since you can get the same effect with an explicit + # '0.0.0.0', we deny both the empty string and None as values. + raise ValueError("Host values of '' or None are not allowed. " + "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead " + "to listen on all active interfaces.") + self._bind_addr = value + bind_addr = property( + _get_bind_addr, + _set_bind_addr, + doc="""The interface on which to listen for connections. + + For TCP sockets, a (host, port) tuple. Host values may be any IPv4 + or IPv6 address, or any valid hostname. The string 'localhost' is a + synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6). + The string '0.0.0.0' is a special IPv4 entry meaning "any active + interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for + IPv6. The empty string or None are not allowed. + + For UNIX sockets, supply the filename as a string. + + Systemd socket activation is automatic and doesn't require tampering + with this variable""") + + def start(self): + """Run the server forever.""" + # We don't have to trap KeyboardInterrupt or SystemExit here, + # because cherrypy.server already does so, calling self.stop() for us. + # If you're using this server with another framework, you should + # trap those exceptions in whatever code block calls start(). 
+ self._interrupt = None + + if self.software is None: + self.software = "%s Server" % self.version + + # Select the appropriate socket + self.socket = None + if os.getenv('LISTEN_PID', None): + # systemd socket activation + self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM) + elif isinstance(self.bind_addr, six.string_types): + # AF_UNIX socket + + # So we can reuse the socket... + try: + os.unlink(self.bind_addr) + except: + pass + + # So everyone can access the socket... + try: + os.chmod(self.bind_addr, 0o777) + except: + pass + + info = [ + (socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)] + else: + # AF_INET or AF_INET6 socket + # Get the correct address family for our host (allows IPv6 + # addresses) + host, port = self.bind_addr + try: + info = socket.getaddrinfo( + host, port, socket.AF_UNSPEC, + socket.SOCK_STREAM, 0, socket.AI_PASSIVE) + except socket.gaierror: + if ':' in self.bind_addr[0]: + info = [(socket.AF_INET6, socket.SOCK_STREAM, + 0, "", self.bind_addr + (0, 0))] + else: + info = [(socket.AF_INET, socket.SOCK_STREAM, + 0, "", self.bind_addr)] + + if not self.socket: + msg = "No socket could be created" + for res in info: + af, socktype, proto, canonname, sa = res + try: + self.bind(af, socktype, proto) + break + except socket.error as serr: + msg = "%s -- (%s: %s)" % (msg, sa, serr) + if self.socket: + self.socket.close() + self.socket = None + + if not self.socket: + raise socket.error(msg) + + # Timeout so KeyboardInterrupt can be caught on Win32 + self.socket.settimeout(1) + self.socket.listen(self.request_queue_size) + + # Create worker threads + self.requests.start() + + self.ready = True + self._start_time = time.time() + while self.ready: + try: + self.tick() + except (KeyboardInterrupt, SystemExit): + raise + except: + self.error_log("Error in HTTPServer.tick", level=logging.ERROR, + traceback=True) + + if self.interrupt: + while self.interrupt is True: + # Wait for self.stop() to complete. See _set_interrupt. 
+ time.sleep(0.1) + if self.interrupt: + raise self.interrupt + + def error_log(self, msg="", level=20, traceback=False): + # Override this in subclasses as desired + sys.stderr.write(msg + '\n') + sys.stderr.flush() + if traceback: + tblines = traceback_.format_exc() + sys.stderr.write(tblines) + sys.stderr.flush() + + def bind(self, family, type, proto=0): + """Create (or recreate) the actual socket object.""" + self.socket = socket.socket(family, type, proto) + prevent_socket_inheritance(self.socket) + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if self.nodelay and not isinstance(self.bind_addr, str): + self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + if self.ssl_adapter is not None: + self.socket = self.ssl_adapter.bind(self.socket) + + # If listening on the IPV6 any address ('::' = IN6ADDR_ANY), + # activate dual-stack. See + # https://github.com/cherrypy/cherrypy/issues/871. + if (hasattr(socket, 'AF_INET6') and family == socket.AF_INET6 + and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')): + try: + self.socket.setsockopt( + socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) + except (AttributeError, socket.error): + # Apparently, the socket option is not available in + # this machine's TCP stack + pass + + self.socket.bind(self.bind_addr) + + def tick(self): + """Accept a new connection and put it on the Queue.""" + try: + s, addr = self.socket.accept() + if self.stats['Enabled']: + self.stats['Accepts'] += 1 + if not self.ready: + return + + prevent_socket_inheritance(s) + if hasattr(s, 'settimeout'): + s.settimeout(self.timeout) + + makefile = CP_makefile + ssl_env = {} + # if ssl cert and key are set, we try to be a secure HTTP server + if self.ssl_adapter is not None: + try: + s, ssl_env = self.ssl_adapter.wrap(s) + except NoSSLError: + msg = ("The client sent a plain HTTP request, but " + "this server only speaks HTTPS on this port.") + buf = ["%s 400 Bad Request\r\n" % self.protocol, + "Content-Length: %s\r\n" % 
len(msg), + "Content-Type: text/plain\r\n\r\n", + msg] + + sock_to_make = s if six.PY3 else s._sock + wfile = makefile(sock_to_make, "wb", DEFAULT_BUFFER_SIZE) + try: + wfile.write(ntob("".join(buf))) + except socket.error: + x = sys.exc_info()[1] + if x.args[0] not in socket_errors_to_ignore: + raise + return + if not s: + return + makefile = self.ssl_adapter.makefile + # Re-apply our timeout since we may have a new socket object + if hasattr(s, 'settimeout'): + s.settimeout(self.timeout) + + conn = self.ConnectionClass(self, s, makefile) + + if not isinstance(self.bind_addr, six.string_types): + # optional values + # Until we do DNS lookups, omit REMOTE_HOST + if addr is None: # sometimes this can happen + # figure out if AF_INET or AF_INET6. + if len(s.getsockname()) == 2: + # AF_INET + addr = ('0.0.0.0', 0) + else: + # AF_INET6 + addr = ('::', 0) + conn.remote_addr = addr[0] + conn.remote_port = addr[1] + + conn.ssl_env = ssl_env + + try: + self.requests.put(conn) + except queue.Full: + # Just drop the conn. TODO: write 503 back? + conn.close() + return + except socket.timeout: + # The only reason for the timeout in start() is so we can + # notice keyboard interrupts on Win32, which don't interrupt + # accept() by default + return + except socket.error: + x = sys.exc_info()[1] + if self.stats['Enabled']: + self.stats['Socket Errors'] += 1 + if x.args[0] in socket_error_eintr: + # I *think* this is right. EINTR should occur when a signal + # is received during the accept() call; all docs say retry + # the call, and I *think* I'm reading it right that Python + # will then go ahead and poll for and handle the signal + # elsewhere. See + # https://github.com/cherrypy/cherrypy/issues/707. + return + if x.args[0] in socket_errors_nonblocking: + # Just try again. See + # https://github.com/cherrypy/cherrypy/issues/479. + return + if x.args[0] in socket_errors_to_ignore: + # Our socket was closed. + # See https://github.com/cherrypy/cherrypy/issues/686. 
+ return + raise + + def _get_interrupt(self): + return self._interrupt + + def _set_interrupt(self, interrupt): + self._interrupt = True + self.stop() + self._interrupt = interrupt + interrupt = property(_get_interrupt, _set_interrupt, + doc="Set this to an Exception instance to " + "interrupt the server.") + + def stop(self): + """Gracefully shutdown a server that is serving forever.""" + self.ready = False + if self._start_time is not None: + self._run_time += (time.time() - self._start_time) + self._start_time = None + + sock = getattr(self, "socket", None) + if sock: + if not isinstance(self.bind_addr, six.string_types): + # Touch our own socket to make accept() return immediately. + try: + host, port = sock.getsockname()[:2] + except socket.error: + x = sys.exc_info()[1] + if x.args[0] not in socket_errors_to_ignore: + # Changed to use error code and not message + # See + # https://github.com/cherrypy/cherrypy/issues/860. + raise + else: + # Note that we're explicitly NOT using AI_PASSIVE, + # here, because we want an actual IP to touch. + # localhost won't work if we've bound to a public IP, + # but it will if we bound to '0.0.0.0' (INADDR_ANY). + for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, + socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + s = None + try: + s = socket.socket(af, socktype, proto) + # See + # http://groups.google.com/group/cherrypy-users/ + # browse_frm/thread/bbfe5eb39c904fe0 + s.settimeout(1.0) + s.connect((host, port)) + s.close() + except socket.error: + if s: + s.close() + if hasattr(sock, "close"): + sock.close() + self.socket = None + + self.requests.stop(self.shutdown_timeout) + + +class Gateway(object): + + """A base class to interface HTTPServer with other systems, such as WSGI. + """ + + def __init__(self, req): + self.req = req + + def respond(self): + """Process the current request. 
Must be overridden in a subclass.""" + raise NotImplemented + + +# These may either be wsgiserver.SSLAdapter subclasses or the string names +# of such classes (in which case they will be lazily loaded). +ssl_adapters = { + 'builtin': 'cherrypy.wsgiserver.ssl_builtin.BuiltinSSLAdapter', + 'pyopenssl': 'cherrypy.wsgiserver.ssl_pyopenssl.pyOpenSSLAdapter', +} + + +def get_ssl_adapter_class(name='builtin'): + """Return an SSL adapter class for the given name.""" + adapter = ssl_adapters[name.lower()] + if isinstance(adapter, six.string_types): + last_dot = adapter.rfind(".") + attr_name = adapter[last_dot + 1:] + mod_path = adapter[:last_dot] + + try: + mod = sys.modules[mod_path] + if mod is None: + raise KeyError() + except KeyError: + # The last [''] is important. + mod = __import__(mod_path, globals(), locals(), ['']) + + # Let an AttributeError propagate outward. + try: + adapter = getattr(mod, attr_name) + except AttributeError: + raise AttributeError("'%s' object has no attribute '%s'" + % (mod_path, attr_name)) + + return adapter + +# ------------------------------- WSGI Stuff -------------------------------- # + + +class CherryPyWSGIServer(HTTPServer): + + """A subclass of HTTPServer which calls a WSGI application.""" + + wsgi_version = (1, 0) + """The version of WSGI to produce.""" + + def __init__(self, bind_addr, wsgi_app, numthreads=10, server_name=None, + max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5, + accepted_queue_size=-1, accepted_queue_timeout=10): + self.requests = ThreadPool(self, min=numthreads or 1, max=max, + accepted_queue_size=accepted_queue_size, + accepted_queue_timeout=accepted_queue_timeout) + self.wsgi_app = wsgi_app + self.gateway = wsgi_gateways[self.wsgi_version] + + self.bind_addr = bind_addr + if not server_name: + server_name = socket.gethostname() + self.server_name = server_name + self.request_queue_size = request_queue_size + + self.timeout = timeout + self.shutdown_timeout = shutdown_timeout + self.clear_stats() 
+ + def _get_numthreads(self): + return self.requests.min + + def _set_numthreads(self, value): + self.requests.min = value + numthreads = property(_get_numthreads, _set_numthreads) + + +class WSGIGateway(Gateway): + + """A base class to interface HTTPServer with WSGI.""" + + def __init__(self, req): + self.req = req + self.started_response = False + self.env = self.get_environ() + self.remaining_bytes_out = None + + def get_environ(self): + """Return a new environ dict targeting the given wsgi.version""" + raise NotImplemented + + def respond(self): + """Process the current request.""" + + """ + From PEP 333: + + The start_response callable must not actually transmit + the response headers. Instead, it must store them for the + server or gateway to transmit only after the first + iteration of the application return value that yields + a NON-EMPTY string, or upon the application's first + invocation of the write() callable. + """ + + response = self.req.server.wsgi_app(self.env, self.start_response) + try: + for chunk in filter(None, response): + if not isinstance(chunk, six.binary_type): + raise ValueError("WSGI Applications must yield bytes") + self.write(chunk) + finally: + if hasattr(response, "close"): + response.close() + + def start_response(self, status, headers, exc_info=None): + """ + WSGI callable to begin the HTTP response. + """ + # "The application may call start_response more than once, + # if and only if the exc_info argument is provided." + if self.started_response and not exc_info: + raise AssertionError("WSGI start_response called a second " + "time with no exc_info.") + self.started_response = True + + # "if exc_info is provided, and the HTTP headers have already been + # sent, start_response must raise an error, and should raise the + # exc_info tuple." 
+ if self.req.sent_headers: + try: + six.reraise(*exc_info) + finally: + exc_info = None + + self.req.status = self._encode_status(status) + + for k, v in headers: + if not isinstance(k, str): + raise TypeError( + "WSGI response header key %r is not of type str." % k) + if not isinstance(v, str): + raise TypeError( + "WSGI response header value %r is not of type str." % v) + if k.lower() == 'content-length': + self.remaining_bytes_out = int(v) + out_header = ntob(k), ntob(v) + self.req.outheaders.append(out_header) + + return self.write + + @staticmethod + def _encode_status(status): + """ + According to PEP 3333, when using Python 3, the response status + and headers must be bytes masquerading as unicode; that is, they + must be of type "str" but are restricted to code points in the + "latin-1" set. + """ + if six.PY2: + return status + if not isinstance(status, str): + raise TypeError("WSGI response status is not of type str.") + return status.encode('ISO-8859-1') + + def write(self, chunk): + """WSGI callable to write unbuffered data to the client. + + This method is also used internally by start_response (to write + data from the iterable returned by the WSGI application). + """ + if not self.started_response: + raise AssertionError("WSGI write called before start_response.") + + chunklen = len(chunk) + rbo = self.remaining_bytes_out + if rbo is not None and chunklen > rbo: + if not self.req.sent_headers: + # Whew. We can send a 500 to the client. + self.req.simple_response( + "500 Internal Server Error", + "The requested resource returned more bytes than the " + "declared Content-Length.") + else: + # Dang. We have probably already sent data. Truncate the chunk + # to fit (so the client doesn't hang) and raise an error later. 
+ chunk = chunk[:rbo] + + if not self.req.sent_headers: + self.req.sent_headers = True + self.req.send_headers() + + self.req.write(chunk) + + if rbo is not None: + rbo -= chunklen + if rbo < 0: + raise ValueError( + "Response body exceeds the declared Content-Length.") + + +class WSGIGateway_10(WSGIGateway): + + """A Gateway class to interface HTTPServer with WSGI 1.0.x.""" + + def get_environ(self): + """Return a new environ dict targeting the given wsgi.version""" + req = self.req + env = { + # set a non-standard environ entry so the WSGI app can know what + # the *real* server protocol is (and what features to support). + # See http://www.faqs.org/rfcs/rfc2145.html. + 'ACTUAL_SERVER_PROTOCOL': req.server.protocol, + 'PATH_INFO': bton(req.path), + 'QUERY_STRING': bton(req.qs), + 'REMOTE_ADDR': req.conn.remote_addr or '', + 'REMOTE_PORT': str(req.conn.remote_port or ''), + 'REQUEST_METHOD': bton(req.method), + 'REQUEST_URI': bton(req.uri), + 'SCRIPT_NAME': '', + 'SERVER_NAME': req.server.server_name, + # Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol. + 'SERVER_PROTOCOL': bton(req.request_protocol), + 'SERVER_SOFTWARE': req.server.software, + 'wsgi.errors': sys.stderr, + 'wsgi.input': req.rfile, + 'wsgi.multiprocess': False, + 'wsgi.multithread': True, + 'wsgi.run_once': False, + 'wsgi.url_scheme': bton(req.scheme), + 'wsgi.version': (1, 0), + } + + if isinstance(req.server.bind_addr, six.string_types): + # AF_UNIX. This isn't really allowed by WSGI, which doesn't + # address unix domain sockets. But it's better than nothing. 
+ env["SERVER_PORT"] = "" + else: + env["SERVER_PORT"] = str(req.server.bind_addr[1]) + + # Request headers + env.update( + ("HTTP_" + bton(k).upper().replace("-", "_"), bton(v)) + for k, v in req.inheaders.items() + ) + + # CONTENT_TYPE/CONTENT_LENGTH + ct = env.pop("HTTP_CONTENT_TYPE", None) + if ct is not None: + env["CONTENT_TYPE"] = ct + cl = env.pop("HTTP_CONTENT_LENGTH", None) + if cl is not None: + env["CONTENT_LENGTH"] = cl + + if req.conn.ssl_env: + env.update(req.conn.ssl_env) + + return env + + +class WSGIGateway_u0(WSGIGateway_10): + + """A Gateway class to interface HTTPServer with WSGI u.0. + + WSGI u.0 is an experimental protocol, which uses unicode for keys + and values in both Python 2 and Python 3. + """ + + def get_environ(self): + """Return a new environ dict targeting the given wsgi.version""" + req = self.req + env_10 = WSGIGateway_10.get_environ(self) + env = dict(map(self._decode_key, self.items())) + env[six.u('wsgi.version')] = ('u', 0) + + # Request-URI + enc = env.setdefault(six.u('wsgi.url_encoding'), six.u('utf-8')) + try: + env["PATH_INFO"] = req.path.decode(enc) + env["QUERY_STRING"] = req.qs.decode(enc) + except UnicodeDecodeError: + # Fall back to latin 1 so apps can transcode if needed. + env['wsgi.url_encoding'] = 'ISO-8859-1' + env["PATH_INFO"] = env_10["PATH_INFO"] + env["QUERY_STRING"] = env_10["QUERY_STRING"] + + env.update(map(self._decode_value, self.items())) + + return env + + @staticmethod + def _decode_key(k, v): + if six.PY2: + k = k.decode('ISO-8859-1') + return k, v + + @staticmethod + def _decode_value(k, v): + skip_keys = 'REQUEST_URI', 'wsgi.input' + if six.PY3 or not isinstance(v, bytes) or k in skip_keys: + return k, v + return k, v.decode('ISO-8859-1') + + +wsgi_gateways = { + (1, 0): WSGIGateway_10, + ('u', 0): WSGIGateway_u0, +} + + +class WSGIPathInfoDispatcher(object): + + """A WSGI dispatcher for dispatch based on the PATH_INFO. + + apps: a dict or list of (path_prefix, app) pairs. 
+ """ + + def __init__(self, apps): + try: + apps = list(apps.items()) + except AttributeError: + pass + + # Sort the apps by len(path), descending + by_path_len = lambda app: len(app[0]) + apps.sort(key=by_path_len, reverse=True) + + # The path_prefix strings must start, but not end, with a slash. + # Use "" instead of "/". + self.apps = [(p.rstrip("/"), a) for p, a in apps] + + def __call__(self, environ, start_response): + path = environ["PATH_INFO"] or "/" + for p, app in self.apps: + # The apps list should be sorted by length, descending. + if path.startswith(p + "/") or path == p: + environ = environ.copy() + environ["SCRIPT_NAME"] = environ["SCRIPT_NAME"] + p + environ["PATH_INFO"] = path[len(p):] + return app(environ, start_response) + + start_response('404 Not Found', [('Content-Type', 'text/plain'), + ('Content-Length', '0')]) + return [''] diff --git a/deps/cherrypy/wsgiserver/__pycache__/__init__.cpython-34.pyc b/deps/cherrypy/wsgiserver/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..91bf366d Binary files /dev/null and b/deps/cherrypy/wsgiserver/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/cherrypy/wsgiserver/__pycache__/ssl_builtin.cpython-34.pyc b/deps/cherrypy/wsgiserver/__pycache__/ssl_builtin.cpython-34.pyc new file mode 100644 index 00000000..cd37f67c Binary files /dev/null and b/deps/cherrypy/wsgiserver/__pycache__/ssl_builtin.cpython-34.pyc differ diff --git a/deps/cherrypy/wsgiserver/ssl_builtin.py b/deps/cherrypy/wsgiserver/ssl_builtin.py new file mode 100644 index 00000000..c8585b71 --- /dev/null +++ b/deps/cherrypy/wsgiserver/ssl_builtin.py @@ -0,0 +1,107 @@ +"""A library for integrating Python's builtin ``ssl`` library with CherryPy. + +The ssl module must be importable for SSL functionality. + +To use this module, set ``CherryPyWSGIServer.ssl_adapter`` to an instance of +``BuiltinSSLAdapter``. 
+""" + +try: + import ssl +except ImportError: + ssl = None + +try: + from _pyio import DEFAULT_BUFFER_SIZE +except ImportError: + try: + from io import DEFAULT_BUFFER_SIZE + except ImportError: + DEFAULT_BUFFER_SIZE = -1 + +import sys + +from cherrypy import wsgiserver + + +class BuiltinSSLAdapter(wsgiserver.SSLAdapter): + + """A wrapper for integrating Python's builtin ssl module with CherryPy.""" + + certificate = None + """The filename of the server SSL certificate.""" + + private_key = None + """The filename of the server's private key file.""" + + certificate_chain = None + """The filename of the certificate chain file.""" + + """The ssl.SSLContext that will be used to wrap sockets where available + (on Python > 2.7.9 / 3.3) + """ + context = None + + def __init__(self, certificate, private_key, certificate_chain=None): + if ssl is None: + raise ImportError("You must install the ssl module to use HTTPS.") + self.certificate = certificate + self.private_key = private_key + self.certificate_chain = certificate_chain + if hasattr(ssl, 'create_default_context'): + self.context = ssl.create_default_context( + purpose=ssl.Purpose.CLIENT_AUTH, + cafile=certificate_chain + ) + self.context.load_cert_chain(certificate, private_key) + + def bind(self, sock): + """Wrap and return the given socket.""" + return sock + + def wrap(self, sock): + """Wrap and return the given socket, plus WSGI environ entries.""" + try: + if self.context is not None: + s = self.context.wrap_socket(sock,do_handshake_on_connect=True, + server_side=True) + else: + s = ssl.wrap_socket(sock, do_handshake_on_connect=True, + server_side=True, certfile=self.certificate, + keyfile=self.private_key, + ssl_version=ssl.PROTOCOL_SSLv23, + ca_certs=self.certificate_chain) + except ssl.SSLError: + e = sys.exc_info()[1] + if e.errno == ssl.SSL_ERROR_EOF: + # This is almost certainly due to the cherrypy engine + # 'pinging' the socket to assert it's connectable; + # the 'ping' isn't SSL. 
+ return None, {} + elif e.errno == ssl.SSL_ERROR_SSL: + if e.args[1].endswith('http request'): + # The client is speaking HTTP to an HTTPS server. + raise wsgiserver.NoSSLError + elif e.args[1].endswith('unknown protocol'): + # The client is speaking some non-HTTP protocol. + # Drop the conn. + return None, {} + raise + return s, self.get_environ(s) + + # TODO: fill this out more with mod ssl env + def get_environ(self, sock): + """Create WSGI environ entries to be merged into each request.""" + cipher = sock.cipher() + ssl_environ = { + "wsgi.url_scheme": "https", + "HTTPS": "on", + 'SSL_PROTOCOL': cipher[1], + 'SSL_CIPHER': cipher[0] + # SSL_VERSION_INTERFACE string The mod_ssl program version + # SSL_VERSION_LIBRARY string The OpenSSL program version + } + return ssl_environ + + def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE): + return wsgiserver.CP_makefile(sock, mode, bufsize) diff --git a/deps/cookies-2.2.1.dist-info/DESCRIPTION.rst b/deps/cookies-2.2.1.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..6c04d8b1 --- /dev/null +++ b/deps/cookies-2.2.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,90 @@ +What is this and what is it for? +-------------------------------- + +cookies.py is a Python module for working with HTTP cookies: parsing and +rendering 'Cookie:' request headers and 'Set-Cookie:' response headers, +and exposing a convenient API for creating and modifying cookies. It can be +used as a replacement of Python's Cookie.py (aka http.cookies). + +Features +-------- + +* Rendering according to the excellent new RFC 6265 + (rather than using a unique ad hoc format inconsistently relating to + unrealistic, very old RFCs which everyone ignored). Uses URL encoding to + represent non-ASCII by default, like many other languages' libraries +* Liberal parsing, incorporating many complaints about Cookie.py barfing + on common cookie formats which can be reliably parsed (e.g. 
search 'cookie' + on the Python issue tracker) +* Well-documented code, with chapter and verse from RFCs + (rather than arbitrary, undocumented decisions and huge tables of magic + values, as you see in Cookie.py). +* Test coverage at 100%, with a much more comprehensive test suite + than Cookie.py +* Single-source compatible with the following Python versions: + 2.6, 2.7, 3.2, 3.3 and PyPy (2.7). +* Cleaner, less surprising API:: + + # old Cookie.py - this code is all directly from its docstring + >>> from Cookie import SmartCookie + >>> C = SmartCookie() + >>> # n.b. it's "smart" because it automatically pickles Python objects, + >>> # which is actually quite stupid for security reasons! + >>> C["rocky"] = "road" + >>> C["rocky"]["path"] = "/cookie" + >>> # So C["rocky"] is a string, except when it's a dict... + >>> # and why do I have to write [""] to access a fixed set of attrs? + >>> # Look at the atrocious way I render out a request header: + >>> C.output(attrs=[], header="Cookie:") + 'Cookie: rocky=road' + + # new cookies.py + >>> from cookies import Cookies, Cookie + >>> cookies = Cookies(rocky='road') + >>> # Can also write explicitly: cookies['rocky'] = Cookie['road'] + >>> cookies['rocky'].path = "/cookie" + >>> cookies.render_request() + 'rocky=road' +* Friendly to customization, extension, and reuse of its parts. + Unlike Cookie.py, it doesn't lock all implementation inside its own classes + (forcing you to write ugly wrappers as Django, Trac, Werkzeug/Flask, web.py + and Tornado had to do). You can suppress minor parse exceptions with + parameters rather than subclass wrappers. You can plug in your own parsers, + renderers and validators for new or existing cookie attributes. You can + render the data out in a dict. You can easily use the underlying imperative + API or even lift the parser's regexps for your own parser or project. They + are very well documented and relate directly to RFCs, so you know exactly + what you are getting and why. 
It's MIT-licensed so do + what you want (but I'd love to know what use you are getting from it!) +* One file, so you can just drop cookies.py into your project if you like +* MIT license, so you can use it in whatever you want with no strings + +Things this is not meant to do +------------------------------ +While this is intended to be a good module for handling cookies, it does not +even try to do any of the following: + +* Maintain backward compatibility with Cookie.py, which would mean + inheriting its confusions and bugs +* Implement RFCs 2109 or 2965, which have always been ignored by almost + everyone and are now obsolete as well +* Handle every conceivable output from terrible legacy apps, which is not + possible to do without lots of silent data loss and corruption (the + parser does try to be liberal as possible otherwise, though) +* Provide a means to store pickled Python objects in cookie values + (that's a big security hole) + +This doesn't compete with the cookielib (http.cookiejar) module in the Python +standard library, which is specifically for implementing cookie storage and +similar behavior in an HTTP client such as a browser. Things cookielib does +that this doesn't: + +* Write to or read from browsers' cookie stores or other proprietary + formats for storing cookie data in files +* Handle the browser/client logic like deciding which cookies to send or + discard, etc. + +If you are looking for a cookie library but neither this one nor cookielib +will help, you might also consider the implementations in WebOb or Bottle. 
+ + diff --git a/deps/cookies-2.2.1.dist-info/INSTALLER b/deps/cookies-2.2.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/cookies-2.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/cookies-2.2.1.dist-info/METADATA b/deps/cookies-2.2.1.dist-info/METADATA new file mode 100644 index 00000000..b523ed38 --- /dev/null +++ b/deps/cookies-2.2.1.dist-info/METADATA @@ -0,0 +1,111 @@ +Metadata-Version: 2.0 +Name: cookies +Version: 2.2.1 +Summary: Friendlier RFC 6265-compliant cookie parser/renderer +Home-page: https://github.com/sashahart/cookies +Author: Sasha Hart +Author-email: s@sashahart.net +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Other Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules + +What is this and what is it for? +-------------------------------- + +cookies.py is a Python module for working with HTTP cookies: parsing and +rendering 'Cookie:' request headers and 'Set-Cookie:' response headers, +and exposing a convenient API for creating and modifying cookies. It can be +used as a replacement of Python's Cookie.py (aka http.cookies). + +Features +-------- + +* Rendering according to the excellent new RFC 6265 + (rather than using a unique ad hoc format inconsistently relating to + unrealistic, very old RFCs which everyone ignored). 
Uses URL encoding to + represent non-ASCII by default, like many other languages' libraries +* Liberal parsing, incorporating many complaints about Cookie.py barfing + on common cookie formats which can be reliably parsed (e.g. search 'cookie' + on the Python issue tracker) +* Well-documented code, with chapter and verse from RFCs + (rather than arbitrary, undocumented decisions and huge tables of magic + values, as you see in Cookie.py). +* Test coverage at 100%, with a much more comprehensive test suite + than Cookie.py +* Single-source compatible with the following Python versions: + 2.6, 2.7, 3.2, 3.3 and PyPy (2.7). +* Cleaner, less surprising API:: + + # old Cookie.py - this code is all directly from its docstring + >>> from Cookie import SmartCookie + >>> C = SmartCookie() + >>> # n.b. it's "smart" because it automatically pickles Python objects, + >>> # which is actually quite stupid for security reasons! + >>> C["rocky"] = "road" + >>> C["rocky"]["path"] = "/cookie" + >>> # So C["rocky"] is a string, except when it's a dict... + >>> # and why do I have to write [""] to access a fixed set of attrs? + >>> # Look at the atrocious way I render out a request header: + >>> C.output(attrs=[], header="Cookie:") + 'Cookie: rocky=road' + + # new cookies.py + >>> from cookies import Cookies, Cookie + >>> cookies = Cookies(rocky='road') + >>> # Can also write explicitly: cookies['rocky'] = Cookie['road'] + >>> cookies['rocky'].path = "/cookie" + >>> cookies.render_request() + 'rocky=road' +* Friendly to customization, extension, and reuse of its parts. + Unlike Cookie.py, it doesn't lock all implementation inside its own classes + (forcing you to write ugly wrappers as Django, Trac, Werkzeug/Flask, web.py + and Tornado had to do). You can suppress minor parse exceptions with + parameters rather than subclass wrappers. You can plug in your own parsers, + renderers and validators for new or existing cookie attributes. You can + render the data out in a dict. 
You can easily use the underlying imperative + API or even lift the parser's regexps for your own parser or project. They + are very well documented and relate directly to RFCs, so you know exactly + what you are getting and why. It's MIT-licensed so do + what you want (but I'd love to know what use you are getting from it!) +* One file, so you can just drop cookies.py into your project if you like +* MIT license, so you can use it in whatever you want with no strings + +Things this is not meant to do +------------------------------ +While this is intended to be a good module for handling cookies, it does not +even try to do any of the following: + +* Maintain backward compatibility with Cookie.py, which would mean + inheriting its confusions and bugs +* Implement RFCs 2109 or 2965, which have always been ignored by almost + everyone and are now obsolete as well +* Handle every conceivable output from terrible legacy apps, which is not + possible to do without lots of silent data loss and corruption (the + parser does try to be liberal as possible otherwise, though) +* Provide a means to store pickled Python objects in cookie values + (that's a big security hole) + +This doesn't compete with the cookielib (http.cookiejar) module in the Python +standard library, which is specifically for implementing cookie storage and +similar behavior in an HTTP client such as a browser. Things cookielib does +that this doesn't: + +* Write to or read from browsers' cookie stores or other proprietary + formats for storing cookie data in files +* Handle the browser/client logic like deciding which cookies to send or + discard, etc. + +If you are looking for a cookie library but neither this one nor cookielib +will help, you might also consider the implementations in WebOb or Bottle. 
+ + diff --git a/deps/cookies-2.2.1.dist-info/RECORD b/deps/cookies-2.2.1.dist-info/RECORD new file mode 100644 index 00000000..a1ca6390 --- /dev/null +++ b/deps/cookies-2.2.1.dist-info/RECORD @@ -0,0 +1,11 @@ +cookies.py,sha256=sF8kRzufOPGQAu8iiPfynJj2yRNGkUcC-JxvTX9mKQ8,47318 +test_cookies.py,sha256=cbFPYlNzzgTkVzz7Xb_3GqmQ4SE0EEz1gRIF1We5QTY,96777 +cookies-2.2.1.dist-info/RECORD,, +cookies-2.2.1.dist-info/metadata.json,sha256=mQLffEYibwvk8r15ayQPMqbU4RCgtGlL5u59EY-8t6k,901 +cookies-2.2.1.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 +cookies-2.2.1.dist-info/DESCRIPTION.rst,sha256=cMKRjszZhygoqs2V6ZKoKQGGtBY5RN_vfTYfd-UYFJ0,4351 +cookies-2.2.1.dist-info/METADATA,sha256=pTGwsy7mjUwouhm4j-E7ld4-rbbUCbiK-bHvwaChN2M,5170 +cookies-2.2.1.dist-info/top_level.txt,sha256=cmWJoCZMIIrsNW2u7GQHmLxsBkrQSFDP-t27J7-E_HQ,21 +cookies-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/test_cookies.cpython-34.pyc,, +__pycache__/cookies.cpython-34.pyc,, diff --git a/deps/cookies-2.2.1.dist-info/WHEEL b/deps/cookies-2.2.1.dist-info/WHEEL new file mode 100644 index 00000000..9dff69d8 --- /dev/null +++ b/deps/cookies-2.2.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/deps/cookies-2.2.1.dist-info/metadata.json b/deps/cookies-2.2.1.dist-info/metadata.json new file mode 100644 index 00000000..0009aea8 --- /dev/null +++ b/deps/cookies-2.2.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"name": "cookies", "classifiers": ["Development Status :: 4 - Beta", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: Implementation :: 
PyPy", "Topic :: Software Development :: Libraries :: Python Modules"], "generator": "bdist_wheel (0.24.0)", "extensions": {"python.details": {"document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"name": "Sasha Hart", "role": "author", "email": "s@sashahart.net"}], "project_urls": {"Home": "https://github.com/sashahart/cookies"}}}, "version": "2.2.1", "metadata_version": "2.0", "summary": "Friendlier RFC 6265-compliant cookie parser/renderer"} \ No newline at end of file diff --git a/deps/cookies-2.2.1.dist-info/top_level.txt b/deps/cookies-2.2.1.dist-info/top_level.txt new file mode 100644 index 00000000..0358d8a0 --- /dev/null +++ b/deps/cookies-2.2.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +cookies +test_cookies diff --git a/deps/cookies.py b/deps/cookies.py new file mode 100644 index 00000000..d1637d22 --- /dev/null +++ b/deps/cookies.py @@ -0,0 +1,1169 @@ +"""Parse, manipulate and render cookies in a convenient way. + +Copyright (c) 2011-2014, Sasha Hart. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +""" +__version__ = "2.2.1" +import re +import datetime +import logging +import sys +from unicodedata import normalize +if sys.version_info >= (3, 0, 0): # pragma: no cover + from urllib.parse import ( + quote as _default_quote, unquote as _default_unquote) + basestring = str + long = int +else: # pragma: no cover + from urllib import ( + quote as _default_quote, unquote as _default_unquote) + + +def _total_seconds(td): + """Wrapper to work around lack of .total_seconds() method in Python 3.1. + """ + if hasattr(td, "total_seconds"): + return td.total_seconds() + return td.days * 3600 * 24 + td.seconds + td.microseconds / 100000.0 + +# see test_encoding_assumptions for how these magical safe= parms were figured +# out. the differences are because of what cookie-octet may contain +# vs the more liberal spec for extension-av +default_cookie_quote = lambda item: _default_quote( + item, safe='!#$%&\'()*+/:<=>?@[]^`{|}~') + +default_extension_quote = lambda item: _default_quote( + item, safe=' !"#$%&\'()*+,/:<=>?@[\\]^`{|}~') + +default_unquote = _default_unquote + + +def _report_invalid_cookie(data): + "How this module logs a bad cookie when exception suppressed" + logging.error("invalid Cookie: %r", data) + + +def _report_unknown_attribute(name): + "How this module logs an unknown attribute when exception suppressed" + logging.error("unknown Cookie attribute: %r", name) + + +def _report_invalid_attribute(name, value, reason): + "How this module logs a bad attribute when exception suppressed" + logging.error("invalid Cookie attribute (%s): %r=%r", reason, name, value) + + +class CookieError(Exception): + """Base class for this module's exceptions, so you can catch them all if + you want to. 
+ """ + def __init__(self): + Exception.__init__(self) + + +class InvalidCookieError(CookieError): + """Raised when attempting to parse or construct a cookie which is + syntactically invalid (in any way that has possibly serious implications). + """ + def __init__(self, data=None, message=""): + CookieError.__init__(self) + self.data = data + self.message = message + + def __str__(self): + return '%r %r' % (self.message, self.data) + + +class InvalidCookieAttributeError(CookieError): + """Raised when setting an invalid attribute on a Cookie. + """ + def __init__(self, name, value, reason=None): + CookieError.__init__(self) + self.name = name + self.value = value + self.reason = reason + + def __str__(self): + prefix = ("%s: " % self.reason) if self.reason else "" + if self.name is None: + return '%s%r' % (prefix, self.value) + return '%s%r = %r' % (prefix, self.name, self.value) + + +class Definitions(object): + """Namespace to hold definitions used in cookie parsing (mostly pieces of + regex). + + These are separated out for individual testing against examples and RFC + grammar, and kept here to avoid cluttering other namespaces. + """ + # Most of the following are set down or cited in RFC 6265 4.1.1 + + # This is the grammar's 'cookie-name' defined as 'token' per RFC 2616 2.2. + COOKIE_NAME = r"!#$%&'*+\-.0-9A-Z^_`a-z|~" + + # 'cookie-octet' - as used twice in definition of 'cookie-value' + COOKIE_OCTET = r"\x21\x23-\x2B\--\x3A\x3C-\x5B\]-\x7E" + + # extension-av - also happens to be a superset of cookie-av and path-value + EXTENSION_AV = """ !"#$%&\\\\'()*+,\-./0-9:<=>?@A-Z[\\]^_`a-z{|}~""" + + # This is for the first pass parse on a Set-Cookie: response header. It + # includes cookie-value, cookie-pair, set-cookie-string, cookie-av. + # extension-av is used to extract the chunk containing variable-length, + # unordered attributes. The second pass then uses ATTR to break out each + # attribute and extract it appropriately. 
+ # As compared with the RFC production grammar, it is must more liberal with + # space characters, in order not to break on data made by barbarians. + SET_COOKIE_HEADER = """(?x) # Verbose mode + ^(?:Set-Cookie:[ ]*)? + (?P[{name}:]+) + [ ]*=[ ]* + + # Accept anything in quotes - this is not RFC 6265, but might ease + # working with older code that half-heartedly works with 2965. Accept + # spaces inside tokens up front, so we can deal with that error one + # cookie at a time, after this first pass. + (?P(?:"{value}*")|(?:[{cookie_octet} ]*)) + [ ]* + + # Extract everything up to the end in one chunk, which will be broken + # down in the second pass. Don't match if there's any unexpected + # garbage at the end (hence the \Z; $ matches before newline). + (?P(?:;[ ]*[{cookie_av}]+)*) + """.format(name=COOKIE_NAME, cookie_av=EXTENSION_AV + ";", + cookie_octet=COOKIE_OCTET, value="[^;]") + + # Now we specify the individual patterns for the attribute extraction pass + # of Set-Cookie parsing (mapping to *-av in the RFC grammar). Things which + # don't match any of these but are in extension-av are simply ignored; + # anything else should be rejected in the first pass (SET_COOKIE_HEADER). + + # Max-Age attribute. These are digits, they are expressed this way + # because that is how they are expressed in the RFC. + MAX_AGE_AV = "Max-Age=(?P[\x30-\x39]+)" + + # Domain attribute; a label is one part of the domain + LABEL = '{let_dig}(?:(?:{let_dig_hyp}+)?{let_dig})?'.format( + let_dig="[A-Za-z0-9]", let_dig_hyp="[0-9A-Za-z\-]") + DOMAIN = "\.?(?:{label}\.)*(?:{label})".format(label=LABEL) + # Parse initial period though it's wrong, as RFC 6265 4.1.2.3 + DOMAIN_AV = "Domain=(?P{domain})".format(domain=DOMAIN) + + # Path attribute. We don't take special care with quotes because + # they are hardly used, they don't allow invalid characters per RFC 6265, + # and " is a valid character to occur in a path value anyway. 
+ PATH_AV = 'Path=(?P[%s]+)' % EXTENSION_AV + + # Expires attribute. This gets big because of date parsing, which needs to + # support a large range of formats, so it's broken down into pieces. + + # Generate a mapping of months to use in render/parse, to avoid + # localizations which might be produced by strftime (e.g. %a -> Mayo) + month_list = ["January", "February", "March", "April", "May", "June", + "July", "August", "September", "October", "November", + "December"] + month_abbr_list = [item[:3] for item in month_list] + month_numbers = {} + for index, name in enumerate(month_list): + name = name.lower() + month_numbers[name[:3]] = index + 1 + month_numbers[name] = index + 1 + # Use the same list to create regexps for months. + MONTH_SHORT = "(?:" + "|".join(item[:3] for item in month_list) + ")" + MONTH_LONG = "(?:" + "|".join(item for item in month_list) + ")" + + # Same drill with weekdays, for the same reason. + weekday_list = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", + "Saturday", "Sunday"] + weekday_abbr_list = [item[:3] for item in weekday_list] + WEEKDAY_SHORT = "(?:" + "|".join(item[:3] for item in weekday_list) + ")" + WEEKDAY_LONG = "(?:" + "|".join(item for item in weekday_list) + ")" + + # This regexp tries to exclude obvious nonsense in the first pass. + DAY_OF_MONTH = "(?:[0 ]?[1-9]|[12][0-9]|[3][01])(?!\d)" + + # Here is the overall date format; ~99% of cases fold into one generalized + # syntax like RFC 1123, and many of the rest use asctime-like formats. + # (see test_date_formats for a full exegesis) + DATE = """(?ix) # Case-insensitive mode, verbose mode + (?: + (?P(?:{wdy}|{weekday}),[ ])? + (?P{day}) + [ \-] + (?P{mon}|{month}) + [ \-] + # This does not support 3-digit years, which are rare and don't + # seem to have one canonical interpretation. + (?P(?:\d{{2}}|\d{{4}})) + [ ] + # HH:MM[:SS] GMT + (?P(?:[ 0][0-9]|[01][0-9]|2[0-3])) + :(?P(?:0[0-9]|[1-5][0-9])) + (?::(?P\d{{2}}))? 
+ [ ]GMT + | + # Support asctime format, e.g. 'Sun Nov 6 08:49:37 1994' + (?P{wdy})[ ] + (?P{mon})[ ] + (?P[ ]\d|\d\d)[ ] + (?P\d\d): + (?P\d\d) + (?::(?P\d\d)?)[ ] + (?P\d\d\d\d) + (?:[ ]GMT)? # GMT (Amazon) + ) + """ + DATE = DATE.format(wdy=WEEKDAY_SHORT, weekday=WEEKDAY_LONG, + day=DAY_OF_MONTH, mon=MONTH_SHORT, month=MONTH_LONG) + + EXPIRES_AV = "Expires=(?P%s)" % DATE + + # Now we're ready to define a regexp which can match any number of attrs + # in the variable portion of the Set-Cookie header (like the unnamed latter + # part of set-cookie-string in the grammar). Each regexp of any complexity + # is split out for testing by itself. + ATTR = """(?ix) # Case-insensitive mode, verbose mode + # Always start with start or semicolon and any number of spaces + (?:^|;)[ ]*(?: + # Big disjunction of attribute patterns (*_AV), with named capture + # groups to extract everything in one pass. Anything unrecognized + # goes in the 'unrecognized' capture group for reporting. + {expires} + |{max_age} + |{domain} + |{path} + |(?PSecure=?) + |(?PHttpOnly=?) + |Version=(?P[{stuff}]+) + |Comment=(?P[{stuff}]+) + |(?P[{stuff}]+) + ) + # End with any number of spaces not matched by the preceding (up to the + # next semicolon) - but do not capture these. + [ ]* + """.format(expires=EXPIRES_AV, max_age=MAX_AGE_AV, domain=DOMAIN_AV, + path=PATH_AV, stuff=EXTENSION_AV) + + # For request data ("Cookie: ") parsing, with finditer cf. RFC 6265 4.2.1 + COOKIE = """(?x) # Verbose mode + (?: # Either something close to valid... + + # Match starts at start of string, or at separator. + # Split on comma for the sake of legacy code (RFC 2109/2965), + # and since it only breaks when invalid commas are put in values. + # see http://bugs.python.org/issue1210326 + (?:^Cookie:|^|;|,) + + # 1 or more valid token characters making up the name (captured) + # with colon added to accommodate users of some old Java apps, etc. 
+ [ ]* + (?P[{name}:]+) + [ ]* + = + [ ]* + + # While 6265 provides only for cookie-octet, this allows just about + # anything in quotes (like in RFC 2616); people stuck on RFC + # 2109/2965 will expect it to work this way. The non-quoted token + # allows interior spaces ('\x20'), which is not valid. In both + # cases, the decision of whether to allow these is downstream. + (?P + ["][^\00-\31"]*["] + | + [{value}] + | + [{value}][{value} ]*[{value}]+ + | + ) + + # ... Or something way off-spec - extract to report and move on + | + (?P[^;]+) + ) + # Trailing spaces after value + [ ]* + # Must end with ; or be at end of string (don't consume this though, + # so use the lookahead assertion ?= + (?=;|\Z) + """.format(name=COOKIE_NAME, value=COOKIE_OCTET) + + # Precompile externally useful definitions into re objects. + COOKIE_NAME_RE = re.compile("^([%s:]+)\Z" % COOKIE_NAME) + COOKIE_RE = re.compile(COOKIE) + SET_COOKIE_HEADER_RE = re.compile(SET_COOKIE_HEADER) + ATTR_RE = re.compile(ATTR) + DATE_RE = re.compile(DATE) + DOMAIN_RE = re.compile(DOMAIN) + PATH_RE = re.compile('^([%s]+)\Z' % EXTENSION_AV) + EOL = re.compile("(?:\r\n|\n)") + + +def strip_spaces_and_quotes(value): + """Remove invalid whitespace and/or single pair of dquotes and return None + for empty strings. + + Used to prepare cookie values, path, and domain attributes in a way which + tolerates simple formatting mistakes and standards variations. + """ + value = value.strip() if value else "" + if value and len(value) > 1 and (value[0] == value[-1] == '"'): + value = value[1:-1] + if not value: + value = "" + return value + + +def parse_string(data, unquote=default_unquote): + """Decode URL-encoded strings to UTF-8 containing the escaped chars. + """ + if data is None: + return None + + # We'll soon need to unquote to recover our UTF-8 data. + # In Python 2, unquote crashes on chars beyond ASCII. So encode functions + # had better not include anything beyond ASCII in data. 
+ # In Python 3, unquote crashes on bytes objects, requiring conversion to + # str objects (unicode) using decode(). + # But in Python 2, the same decode causes unquote to butcher the data. + # So in that case, just leave the bytes. + if isinstance(data, bytes): + if sys.version_info > (3, 0, 0): # pragma: no cover + data = data.decode('ascii') + # Recover URL encoded data + unquoted = unquote(data) + # Without this step, Python 2 may have good URL decoded *bytes*, + # which will therefore not normalize as unicode and not compare to + # the original. + if isinstance(unquoted, bytes): + unquoted = unquoted.decode('utf-8') + return unquoted + + +def parse_date(value): + """Parse an RFC 1123 or asctime-like format date string to produce + a Python datetime object (without a timezone). + """ + # Do the regex magic; also enforces 2 or 4 digit years + match = Definitions.DATE_RE.match(value) if value else None + if not match: + return None + # We're going to extract and prepare captured data in 'data'. + data = {} + captured = match.groupdict() + fields = ['year', 'month', 'day', 'hour', 'minute', 'second'] + # If we matched on the RFC 1123 family format + if captured['year']: + for field in fields: + data[field] = captured[field] + # If we matched on the asctime format, use year2 etc. + else: + for field in fields: + data[field] = captured[field + "2"] + year = data['year'] + # Interpret lame 2-digit years - base the cutoff on UNIX epoch, in case + # someone sets a '70' cookie meaning 'distant past'. This won't break for + # 58 years and people who use 2-digit years are asking for it anyway. 
+ if len(year) == 2: + if int(year) < 70: + year = "20" + year + else: + year = "19" + year + year = int(year) + # Clamp to [1900, 9999]: strftime has min 1900, datetime has max 9999 + data['year'] = max(1900, min(year, 9999)) + # Other things which are numbers should convert to integer + for field in ['day', 'hour', 'minute', 'second']: + if data[field] is None: + data[field] = 0 + data[field] = int(data[field]) + # Look up the number datetime needs for the named month + data['month'] = Definitions.month_numbers[data['month'].lower()] + return datetime.datetime(**data) + + +def parse_domain(value): + """Parse and validate an incoming Domain attribute value. + """ + value = strip_spaces_and_quotes(value) + if value: + assert valid_domain(value) + return value + + +def parse_path(value): + """Parse and validate an incoming Path attribute value. + """ + value = strip_spaces_and_quotes(value) + assert valid_path(value) + return value + + +def parse_value(value, allow_spaces=True, unquote=default_unquote): + "Process a cookie value" + if value is None: + return None + value = strip_spaces_and_quotes(value) + value = parse_string(value, unquote=unquote) + if not allow_spaces: + assert ' ' not in value + return value + + +def valid_name(name): + "Validate a cookie name string" + if isinstance(name, bytes): + name = name.decode('ascii') + if not Definitions.COOKIE_NAME_RE.match(name): + return False + # This module doesn't support $identifiers, which are part of an obsolete + # and highly complex standard which is never used. + if name[0] == "$": + return False + return True + + +def valid_value(value, quote=default_cookie_quote, unquote=default_unquote): + """Validate a cookie value string. + + This is generic across quote/unquote functions because it directly verifies + the encoding round-trip using the specified quote/unquote functions. 
+ So if you use different quote/unquote functions, use something like this + as a replacement for valid_value:: + + my_valid_value = lambda value: valid_value(value, quote=my_quote, + unquote=my_unquote) + """ + if value is None: + return False + + # Put the value through a round trip with the given quote and unquote + # functions, so we will know whether data will get lost or not in the event + # that we don't complain. + encoded = encode_cookie_value(value, quote=quote) + decoded = parse_string(encoded, unquote=unquote) + + # If the original string made the round trip, this is a valid value for the + # given quote and unquote functions. Since the round trip can generate + # different unicode forms, normalize before comparing, so we can ignore + # trivial inequalities. + decoded_normalized = (normalize("NFKD", decoded) + if not isinstance(decoded, bytes) else decoded) + value_normalized = (normalize("NFKD", value) + if not isinstance(value, bytes) else value) + if decoded_normalized == value_normalized: + return True + return False + + +def valid_date(date): + "Validate an expires datetime object" + # We want something that acts like a datetime. In particular, + # strings indicate a failure to parse down to an object and ints are + # nonstandard and ambiguous at best. + if not hasattr(date, 'tzinfo'): + return False + # Relevant RFCs define UTC as 'close enough' to GMT, and the maximum + # difference between UTC and GMT is often stated to be less than a second. + if date.tzinfo is None or _total_seconds(date.utcoffset()) < 1.1: + return True + return False + + +def valid_domain(domain): + "Validate a cookie domain ASCII string" + # Using encoding on domain would confuse browsers into not sending cookies. + # Generate UnicodeDecodeError up front if it can't store as ASCII. + domain.encode('ascii') + # Domains starting with periods are not RFC-valid, but this is very common + # in existing cookies, so they should still parse with DOMAIN_AV. 
+ if Definitions.DOMAIN_RE.match(domain): + return True + return False + + +def valid_path(value): + "Validate a cookie path ASCII string" + # Generate UnicodeDecodeError if path can't store as ASCII. + value.encode("ascii") + # Cookies without leading slash will likely be ignored, raise ASAP. + if not (value and value[0] == "/"): + return False + if not Definitions.PATH_RE.match(value): + return False + return True + + +def valid_max_age(number): + "Validate a cookie Max-Age" + if isinstance(number, basestring): + try: + number = long(number) + except (ValueError, TypeError): + return False + if number >= 0 and number % 1 == 0: + return True + return False + + +def encode_cookie_value(data, quote=default_cookie_quote): + """URL-encode strings to make them safe for a cookie value. + + By default this uses urllib quoting, as used in many other cookie + implementations and in other Python code, instead of an ad hoc escaping + mechanism which includes backslashes (these also being illegal chars in RFC + 6265). + """ + if data is None: + return None + + # encode() to ASCII bytes so quote won't crash on non-ASCII. + # but doing that to bytes objects is nonsense. + # On Python 2 encode crashes if s is bytes containing non-ASCII. + # On Python 3 encode crashes on all byte objects. + if not isinstance(data, bytes): + data = data.encode("utf-8") + + # URL encode data so it is safe for cookie value + quoted = quote(data) + + # Don't force to bytes, so that downstream can use proper string API rather + # than crippled bytes, and to encourage encoding to be done just once. + return quoted + + +def encode_extension_av(data, quote=default_extension_quote): + """URL-encode strings to make them safe for an extension-av + (extension attribute value): + """ + if not data: + return '' + return quote(data) + + +def render_date(date): + """Render a date (e.g. an Expires value) per RFCs 6265/2616/1123. + + Don't give this localized (timezone-aware) datetimes. 
If you use them, + convert them to GMT before passing them to this. There are too many + conversion corner cases to handle this universally. + """ + if not date: + return None + assert valid_date(date) + # Avoid %a and %b, which can change with locale, breaking compliance + weekday = Definitions.weekday_abbr_list[date.weekday()] + month = Definitions.month_abbr_list[date.month - 1] + return date.strftime("{day}, %d {month} %Y %H:%M:%S GMT" + ).format(day=weekday, month=month) + + +def render_domain(domain): + if not domain: + return None + if domain[0] == '.': + return domain[1:] + return domain + + +def _parse_request(header_data, ignore_bad_cookies=False): + """Turn one or more lines of 'Cookie:' header data into a dict mapping + cookie names to cookie values (raw strings). + """ + cookies_dict = {} + for line in Definitions.EOL.split(header_data.strip()): + matches = Definitions.COOKIE_RE.finditer(line) + matches = [item for item in matches] + for match in matches: + invalid = match.group('invalid') + if invalid: + if not ignore_bad_cookies: + raise InvalidCookieError(data=invalid) + _report_invalid_cookie(invalid) + continue + name = match.group('name') + values = cookies_dict.get(name) + value = match.group('value').strip('"') + if values: + values.append(value) + else: + cookies_dict[name] = [value] + if not matches: + if not ignore_bad_cookies: + raise InvalidCookieError(data=line) + _report_invalid_cookie(line) + return cookies_dict + + +def parse_one_response(line, ignore_bad_cookies=False, + ignore_bad_attributes=True): + """Turn one 'Set-Cookie:' line into a dict mapping attribute names to + attribute values (raw strings). 
+ """ + cookie_dict = {} + # Basic validation, extract name/value/attrs-chunk + match = Definitions.SET_COOKIE_HEADER_RE.match(line) + if not match: + if not ignore_bad_cookies: + raise InvalidCookieError(data=line) + _report_invalid_cookie(line) + return None + cookie_dict.update({ + 'name': match.group('name'), + 'value': match.group('value')}) + # Extract individual attrs from the attrs chunk + for match in Definitions.ATTR_RE.finditer(match.group('attrs')): + captured = dict((k, v) for (k, v) in match.groupdict().items() if v) + unrecognized = captured.get('unrecognized', None) + if unrecognized: + if not ignore_bad_attributes: + raise InvalidCookieAttributeError(None, unrecognized, + "unrecognized") + _report_unknown_attribute(unrecognized) + continue + # for unary flags + for key in ('secure', 'httponly'): + if captured.get(key): + captured[key] = True + # ignore subcomponents of expires - they're still there to avoid doing + # two passes + timekeys = ('weekday', 'month', 'day', 'hour', 'minute', 'second', + 'year') + if 'year' in captured: + for key in timekeys: + del captured[key] + elif 'year2' in captured: + for key in timekeys: + del captured[key + "2"] + cookie_dict.update(captured) + return cookie_dict + + +def _parse_response(header_data, ignore_bad_cookies=False, + ignore_bad_attributes=True): + """Turn one or more lines of 'Set-Cookie:' header data into a list of dicts + mapping attribute names to attribute values (as plain strings). 
+ """ + cookie_dicts = [] + for line in Definitions.EOL.split(header_data.strip()): + if not line: + break + cookie_dict = parse_one_response( + line, ignore_bad_cookies=ignore_bad_cookies, + ignore_bad_attributes=ignore_bad_attributes) + if not cookie_dict: + continue + cookie_dicts.append(cookie_dict) + if not cookie_dicts: + if not ignore_bad_cookies: + raise InvalidCookieError(data=header_data) + _report_invalid_cookie(header_data) + return cookie_dicts + + +class Cookie(object): + """Provide a simple interface for creating, modifying, and rendering + individual HTTP cookies. + + Cookie attributes are represented as normal Python object attributes. + Parsing, rendering and validation are reconfigurable per-attribute. The + default behavior is intended to comply with RFC 6265, URL-encoding illegal + characters where necessary. For example: the default behavior for the + Expires attribute is to parse strings as datetimes using parse_date, + validate that any set value is a datetime, and render the attribute per the + preferred date format in RFC 1123. + """ + def __init__(self, name, value, **kwargs): + # If we don't have or can't set a name value, we don't want to return + # junk, so we must break control flow. And we don't want to use + # InvalidCookieAttributeError, because users may want to catch that to + # suppress all complaining about funky attributes. 
+ try: + self.name = name + except InvalidCookieAttributeError: + raise InvalidCookieError(message="invalid name for new Cookie", + data=name) + value = value or '' + try: + self.value = value + except InvalidCookieAttributeError: + raise InvalidCookieError(message="invalid value for new Cookie", + data=value) + if kwargs: + self._set_attributes(kwargs, ignore_bad_attributes=False) + + def _set_attributes(self, attrs, ignore_bad_attributes=False): + for attr_name, attr_value in attrs.items(): + if not attr_name in self.attribute_names: + if not ignore_bad_attributes: + raise InvalidCookieAttributeError( + attr_name, attr_value, + "unknown cookie attribute '%s'" % attr_name) + _report_unknown_attribute(attr_name) + + try: + setattr(self, attr_name, attr_value) + except InvalidCookieAttributeError as error: + if not ignore_bad_attributes: + raise + _report_invalid_attribute(attr_name, attr_value, error.reason) + continue + + @classmethod + def from_dict(cls, cookie_dict, ignore_bad_attributes=True): + """Construct an instance from a dict of strings to parse. + + The main difference between this and Cookie(name, value, **kwargs) is + that the values in the argument to this method are parsed. + + If ignore_bad_attributes=True (default), values which did not parse + are set to '' in order to avoid passing bad data. + """ + name = cookie_dict.get('name', None) + if not name: + raise InvalidCookieError("Cookie must have name") + raw_value = cookie_dict.get('value', '') + # Absence or failure of parser here is fatal; errors in present name + # and value should be found by Cookie.__init__. 
+ value = cls.attribute_parsers['value'](raw_value) + cookie = cls(name, value) + + # Parse values from serialized formats into objects + parsed = {} + for key, value in cookie_dict.items(): + # Don't want to pass name/value to _set_attributes + if key in ('name', 'value'): + continue + parser = cls.attribute_parsers.get(key) + if not parser: + # Don't let totally unknown attributes pass silently + if not ignore_bad_attributes: + raise InvalidCookieAttributeError( + key, value, "unknown cookie attribute '%s'" % key) + _report_unknown_attribute(key) + continue + try: + parsed_value = parser(value) + except Exception as e: + reason = "did not parse with %r: %r" % (parser, e) + if not ignore_bad_attributes: + raise InvalidCookieAttributeError( + key, value, reason) + _report_invalid_attribute(key, value, reason) + parsed_value = '' + parsed[key] = parsed_value + + # Set the parsed objects (does object validation automatically) + cookie._set_attributes(parsed, ignore_bad_attributes) + return cookie + + @classmethod + def from_string(cls, line, ignore_bad_cookies=False, + ignore_bad_attributes=True): + "Construct a Cookie object from a line of Set-Cookie header data." + cookie_dict = parse_one_response( + line, ignore_bad_cookies=ignore_bad_cookies, + ignore_bad_attributes=ignore_bad_attributes) + if not cookie_dict: + return None + return cls.from_dict( + cookie_dict, ignore_bad_attributes=ignore_bad_attributes) + + def to_dict(self): + this_dict = {'name': self.name, 'value': self.value} + this_dict.update(self.attributes()) + return this_dict + + def validate(self, name, value): + """Validate a cookie attribute with an appropriate validator. + + The value comes in already parsed (for example, an expires value + should be a datetime). Called automatically when an attribute + value is set. 
+ """ + validator = self.attribute_validators.get(name, None) + if validator: + return True if validator(value) else False + return True + + def __setattr__(self, name, value): + """Attributes mentioned in attribute_names get validated using + functions in attribute_validators, raising an exception on failure. + Others get left alone. + """ + if name in self.attribute_names or name in ("name", "value"): + if name == 'name' and not value: + raise InvalidCookieError(message="Cookies must have names") + # Ignore None values indicating unset attr. Other invalids should + # raise error so users of __setattr__ can learn. + if value is not None: + if not self.validate(name, value): + raise InvalidCookieAttributeError( + name, value, "did not validate with " + + repr(self.attribute_validators.get(name))) + object.__setattr__(self, name, value) + + def __getattr__(self, name): + """Provide for acting like everything in attribute_names is + automatically set to None, rather than having to do so explicitly and + only at import time. + """ + if name in self.attribute_names: + return None + raise AttributeError(name) + + def attributes(self): + """Export this cookie's attributes as a dict of encoded values. + + This is an important part of the code for rendering attributes, e.g. + render_response(). + """ + dictionary = {} + # Only look for attributes registered in attribute_names. + for python_attr_name, cookie_attr_name in self.attribute_names.items(): + value = getattr(self, python_attr_name) + renderer = self.attribute_renderers.get(python_attr_name, None) + if renderer: + value = renderer(value) + # If renderer returns None, or it's just natively none, then the + # value is suppressed entirely - does not appear in any rendering. + if not value: + continue + dictionary[cookie_attr_name] = value + return dictionary + + def render_request(self): + """Render as a string formatted for HTTP request headers + (simple 'Cookie: ' style). 
+ """ + # Use whatever renderers are defined for name and value. + name, value = self.name, self.value + renderer = self.attribute_renderers.get('name', None) + if renderer: + name = renderer(name) + renderer = self.attribute_renderers.get('value', None) + if renderer: + value = renderer(value) + return ''.join((name, "=", value)) + + def render_response(self): + """Render as a string formatted for HTTP response headers + (detailed 'Set-Cookie: ' style). + """ + # Use whatever renderers are defined for name and value. + # (.attributes() is responsible for all other rendering.) + name, value = self.name, self.value + renderer = self.attribute_renderers.get('name', None) + if renderer: + name = renderer(name) + renderer = self.attribute_renderers.get('value', None) + if renderer: + value = renderer(value) + return '; '.join( + ['{0}={1}'.format(name, value)] + + [key if isinstance(val, bool) else '='.join((key, val)) + for key, val in self.attributes().items()] + ) + + def __eq__(self, other): + attrs = ['name', 'value'] + list(self.attribute_names.keys()) + for attr in attrs: + mine = getattr(self, attr, None) + his = getattr(other, attr, None) + if isinstance(mine, bytes): + mine = mine.decode('utf-8') + if isinstance(his, bytes): + his = his.decode('utf-8') + if attr == 'domain': + if mine and mine[0] == '.': + mine = mine[1:] + if his and his[0] == '.': + his = his[1:] + if mine != his: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + # Add a name and its proper rendering to this dict to register an attribute + # as exportable. The key is the name of the Cookie object attribute in + # Python, and it is mapped to the name you want in the output. + # 'name' and 'value' should not be here. 
+ attribute_names = { + 'expires': 'Expires', + 'max_age': 'Max-Age', + 'domain': 'Domain', + 'path': 'Path', + 'comment': 'Comment', + 'version': 'Version', + 'secure': 'Secure', + 'httponly': 'HttpOnly', + } + + # Register single-parameter functions in this dictionary to have them + # used for encoding outgoing values (e.g. as RFC compliant strings, + # as base64, encrypted stuff, etc.) + # These are called by the property generated by cookie_attribute(). + # Usually it would be wise not to define a renderer for name, but it is + # supported in case there is ever a real need. + attribute_renderers = { + 'value': encode_cookie_value, + 'domain': render_domain, + 'expires': render_date, + 'max_age': lambda item: str(item) if item is not None else None, + 'secure': lambda item: True if item else False, + 'httponly': lambda item: True if item else False, + 'comment': encode_extension_av, + 'version': lambda item: (str(item) if isinstance(item, int) + else encode_extension_av(item)), + } + + # Register single-parameter functions in this dictionary to have them used + # for decoding incoming values for use in the Python API (e.g. into nice + # objects, numbers, unicode strings, etc.) + # These are called by the property generated by cookie_attribute(). + attribute_parsers = { + 'value': parse_value, + 'expires': parse_date, + 'domain': parse_domain, + 'path': parse_path, + 'max_age': lambda item: long(strip_spaces_and_quotes(item)), + 'comment': parse_string, + 'version': lambda item: int(strip_spaces_and_quotes(item)), + 'secure': lambda item: True if item else False, + 'httponly': lambda item: True if item else False, + } + + # Register single-parameter functions which return a true value for + # acceptable values, and a false value for unacceptable ones. 
An
+ # attribute's validator is run after it is parsed or when it is directly
+ # set, and InvalidCookieAttributeError is raised if validation fails (and the
+ # validator doesn't raise a different exception first)
+ attribute_validators = {
+ 'name': valid_name,
+ 'value': valid_value,
+ 'expires': valid_date,
+ 'domain': valid_domain,
+ 'path': valid_path,
+ 'max_age': valid_max_age,
+ 'comment': valid_value,
+ 'version': lambda number: re.match("^\d+\Z", str(number)),
+ 'secure': lambda item: item is True or item is False,
+ 'httponly': lambda item: item is True or item is False,
+ }
+
+
+class Cookies(dict):
+ """Represent a set of cookies indexed by name.
+
+ This class bundles together a set of Cookie objects and provides
+ a convenient interface to them for parsing and producing cookie headers.
+ In basic operation it acts just like a dict of Cookie objects, but it adds
+ additional convenience methods for the usual cookie tasks: add cookie
+ objects by their names, create new cookie objects under specified names,
+ parse HTTP request or response data into new cookie objects automatically
+ stored in the dict, and render the set in formats suitable for HTTP request
+ or response headers.
+ """
+ DEFAULT_COOKIE_CLASS = Cookie
+
+ def __init__(self, *args, **kwargs):
+ dict.__init__(self)
+ self.all_cookies = []
+ self.cookie_class = kwargs.get(
+ "_cookie_class", self.DEFAULT_COOKIE_CLASS)
+ self.add(*args, **kwargs)
+
+ def add(self, *args, **kwargs):
+ """Add Cookie objects by their names, or create new ones under
+ specified names.
+
+ Any unnamed arguments are interpreted as existing cookies, and
+ are added under the value in their .name attribute. With keyword
+ arguments, the key is interpreted as the cookie name and the
+ value as the UNENCODED value stored in the cookie.
+ """
+ # Only the first one is accessible through the main interface,
+ # others accessible through get_all (all_cookies). 
+ for cookie in args: + self.all_cookies.append(cookie) + if cookie.name in self: + continue + self[cookie.name] = cookie + for key, value in kwargs.items(): + cookie = self.cookie_class(key, value) + self.all_cookies.append(cookie) + if key in self: + continue + self[key] = cookie + + def get_all(self, key): + return [cookie for cookie in self.all_cookies + if cookie.name == key] + + def parse_request(self, header_data, ignore_bad_cookies=False): + """Parse 'Cookie' header data into Cookie objects, and add them to + this Cookies object. + + :arg header_data: string containing only 'Cookie:' request headers or + header values (as in CGI/WSGI HTTP_COOKIE); if more than one, they must + be separated by CRLF (\\r\\n). + + :arg ignore_bad_cookies: if set, will log each syntactically invalid + cookie (at the granularity of semicolon-delimited blocks) rather than + raising an exception at the first bad cookie. + + :returns: a Cookies instance containing Cookie objects parsed from + header_data. + + .. note:: + If you want to parse 'Set-Cookie:' response headers, please use + parse_response instead. parse_request will happily turn 'expires=frob' + into a separate cookie without complaining, according to the grammar. 
+ """ + cookies_dict = _parse_request( + header_data, ignore_bad_cookies=ignore_bad_cookies) + cookie_objects = [] + for name, values in cookies_dict.items(): + for value in values: + # Use from_dict to check name and parse value + cookie_dict = {'name': name, 'value': value} + try: + cookie = self.cookie_class.from_dict(cookie_dict) + except InvalidCookieError: + if not ignore_bad_cookies: + raise + else: + cookie_objects.append(cookie) + try: + self.add(*cookie_objects) + except InvalidCookieError: + if not ignore_bad_cookies: + raise + _report_invalid_cookie(header_data) + return self + + def parse_response(self, header_data, ignore_bad_cookies=False, + ignore_bad_attributes=True): + """Parse 'Set-Cookie' header data into Cookie objects, and add them to + this Cookies object. + + :arg header_data: string containing only 'Set-Cookie:' request headers + or their corresponding header values; if more than one, they must be + separated by CRLF (\\r\\n). + + :arg ignore_bad_cookies: if set, will log each syntactically invalid + cookie rather than raising an exception at the first bad cookie. (This + includes cookies which have noncompliant characters in the attribute + section). + + :arg ignore_bad_attributes: defaults to True, which means to log but + not raise an error when a particular attribute is unrecognized. (This + does not necessarily mean that the attribute is invalid, although that + would often be the case.) if unset, then an error will be raised at the + first semicolon-delimited block which has an unknown attribute. + + :returns: a Cookies instance containing Cookie objects parsed from + header_data, each with recognized attributes populated. + + .. note:: + If you want to parse 'Cookie:' headers (i.e., data like what's sent + with an HTTP request, which has only name=value pairs and no + attributes), then please use parse_request instead. 
Such lines often
+ contain multiple name=value pairs, and parse_response will throw away
+ the pairs after the first one, which will probably generate errors or
+ confusing behavior. (Since there's no perfect way to automatically
+ determine which kind of parsing to do, you have to tell it manually by
+ choosing correctly between parse_request and parse_response.)
+ """
+ cookie_dicts = _parse_response(
+ header_data,
+ ignore_bad_cookies=ignore_bad_cookies,
+ ignore_bad_attributes=ignore_bad_attributes)
+ cookie_objects = []
+ for cookie_dict in cookie_dicts:
+ cookie = self.cookie_class.from_dict(cookie_dict)
+ cookie_objects.append(cookie)
+ self.add(*cookie_objects)
+ return self
+
+ @classmethod
+ def from_request(cls, header_data, ignore_bad_cookies=False):
+ "Construct a Cookies object from request header data."
+ cookies = cls()
+ cookies.parse_request(
+ header_data, ignore_bad_cookies=ignore_bad_cookies)
+ return cookies
+
+ @classmethod
+ def from_response(cls, header_data, ignore_bad_cookies=False,
+ ignore_bad_attributes=True):
+ "Construct a Cookies object from response header data."
+ cookies = cls()
+ cookies.parse_response(
+ header_data,
+ ignore_bad_cookies=ignore_bad_cookies,
+ ignore_bad_attributes=ignore_bad_attributes)
+ return cookies
+
+ def render_request(self, sort=True):
+ """Render the dict's Cookie objects into a string formatted for HTTP
+ request headers (simple 'Cookie: ' style).
+ """
+ if not sort:
+ return ("; ".join(
+ cookie.render_request() for cookie in self.values()))
+ return ("; ".join(sorted(
+ cookie.render_request() for cookie in self.values())))
+
+ def render_response(self, sort=True):
+ """Render the dict's Cookie objects into list of strings formatted for
+ HTTP response headers (detailed 'Set-Cookie: ' style). 
+ """ + rendered = [cookie.render_response() for cookie in self.values()] + return rendered if not sort else sorted(rendered) + + def __repr__(self): + return "Cookies(%s)" % ', '.join("%s=%r" % (name, cookie.value) for + (name, cookie) in self.items()) + + def __eq__(self, other): + """Test if a Cookies object is globally 'equal' to another one by + seeing if it looks like a dict such that d[k] == self[k]. This depends + on each Cookie object reporting its equality correctly. + """ + if not hasattr(other, "keys"): + return False + try: + keys = sorted(set(self.keys()) | set(other.keys())) + for key in keys: + if not key in self: + return False + if not key in other: + return False + if self[key] != other[key]: + return False + except (TypeError, KeyError): + raise + return True + + def __ne__(self, other): + return not self.__eq__(other) diff --git a/deps/easy_install.py b/deps/easy_install.py new file mode 100644 index 00000000..d87e9840 --- /dev/null +++ b/deps/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/deps/enum_compat-0.0.2.dist-info/DESCRIPTION.rst b/deps/enum_compat-0.0.2.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..aa749cf1 --- /dev/null +++ b/deps/enum_compat-0.0.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,9 @@ + +enum-compat +=========== + +This is a virtual package, its whole purpose is to install enum34 on +Python older than 3.4. On Python 3.4+ it's a no-op. 
+ + + diff --git a/deps/enum_compat-0.0.2.dist-info/INSTALLER b/deps/enum_compat-0.0.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/enum_compat-0.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/enum_compat-0.0.2.dist-info/METADATA b/deps/enum_compat-0.0.2.dist-info/METADATA new file mode 100644 index 00000000..ff8aca40 --- /dev/null +++ b/deps/enum_compat-0.0.2.dist-info/METADATA @@ -0,0 +1,30 @@ +Metadata-Version: 2.0 +Name: enum-compat +Version: 0.0.2 +Summary: enum/enum34 compatibility package +Home-page: https://github.com/jstasiak/enum-compat +Author: Jakub Stasiak +Author-email: jakub@stasiak.at +License: MIT +Keywords: enum,compatibility,enum34 +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 + + +enum-compat +=========== + +This is a virtual package, its whole purpose is to install enum34 on +Python older than 3.4. On Python 3.4+ it's a no-op. 
+ + + diff --git a/deps/enum_compat-0.0.2.dist-info/RECORD b/deps/enum_compat-0.0.2.dist-info/RECORD new file mode 100644 index 00000000..3ded7879 --- /dev/null +++ b/deps/enum_compat-0.0.2.dist-info/RECORD @@ -0,0 +1,7 @@ +enum_compat-0.0.2.dist-info/DESCRIPTION.rst,sha256=ZUxgOYtR8j28PbCHss1PpZ8wDHJxnfb_LZB7HO4RciE,150 +enum_compat-0.0.2.dist-info/METADATA,sha256=tuKXeC1xCg2NlzhUoupS_zEYriQ-0Vg8nBI1alGFVqA,908 +enum_compat-0.0.2.dist-info/RECORD,, +enum_compat-0.0.2.dist-info/WHEEL,sha256=lCqt3ViRAf9c8mCs6o7ffkwROUdYSy8_YHn5f_rulB4,93 +enum_compat-0.0.2.dist-info/metadata.json,sha256=_55lhugqrWFsIqTavHtPEOhjQApHbMVyvYnCuJgE514,884 +enum_compat-0.0.2.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +enum_compat-0.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 diff --git a/deps/enum_compat-0.0.2.dist-info/WHEEL b/deps/enum_compat-0.0.2.dist-info/WHEEL new file mode 100644 index 00000000..6d9801a2 --- /dev/null +++ b/deps/enum_compat-0.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: cp34-none-any + diff --git a/deps/enum_compat-0.0.2.dist-info/metadata.json b/deps/enum_compat-0.0.2.dist-info/metadata.json new file mode 100644 index 00000000..ea0ddf2d --- /dev/null +++ b/deps/enum_compat-0.0.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Intended Audience :: Developers", "Topic :: Software Development :: Libraries", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5"], "extensions": {"python.details": {"contacts": [{"email": "jakub@stasiak.at", "name": "Jakub Stasiak", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": 
"https://github.com/jstasiak/enum-compat"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["enum", "compatibility", "enum34"], "license": "MIT", "metadata_version": "2.0", "name": "enum-compat", "summary": "enum/enum34 compatibility package", "version": "0.0.2"} \ No newline at end of file diff --git a/deps/enum_compat-0.0.2.dist-info/top_level.txt b/deps/enum_compat-0.0.2.dist-info/top_level.txt new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/deps/enum_compat-0.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ + diff --git a/deps/forecastio/__init__.py b/deps/forecastio/__init__.py new file mode 100644 index 00000000..6132cc90 --- /dev/null +++ b/deps/forecastio/__init__.py @@ -0,0 +1 @@ +from forecastio.api import load_forecast, manual diff --git a/deps/forecastio/__pycache__/__init__.cpython-34.pyc b/deps/forecastio/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..fe3f7759 Binary files /dev/null and b/deps/forecastio/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/forecastio/__pycache__/api.cpython-34.pyc b/deps/forecastio/__pycache__/api.cpython-34.pyc new file mode 100644 index 00000000..b5c0bb58 Binary files /dev/null and b/deps/forecastio/__pycache__/api.cpython-34.pyc differ diff --git a/deps/forecastio/__pycache__/models.cpython-34.pyc b/deps/forecastio/__pycache__/models.cpython-34.pyc new file mode 100644 index 00000000..b13240ab Binary files /dev/null and b/deps/forecastio/__pycache__/models.cpython-34.pyc differ diff --git a/deps/forecastio/__pycache__/utils.cpython-34.pyc b/deps/forecastio/__pycache__/utils.cpython-34.pyc new file mode 100644 index 00000000..8454f243 Binary files /dev/null and b/deps/forecastio/__pycache__/utils.cpython-34.pyc differ diff --git a/deps/forecastio/api.py b/deps/forecastio/api.py new file mode 100644 index 00000000..4cbd5fb0 --- /dev/null +++ b/deps/forecastio/api.py @@ -0,0 +1,69 @@ +import requests +import threading + +from forecastio.models import Forecast + + +def 
load_forecast(key, lat, lng, time=None, units="auto", lazy=False,
+ callback=None):
+ """
+ This function builds the request url and loads some or all of the
+ needed json depending on whether lazy is True
+
+ inLat: The latitude of the forecast
+ inLong: The longitude of the forecast
+ time: A datetime.datetime object representing the desired time of
+ the forecast. If no timezone is present, the API assumes local
+ time at the provided latitude and longitude.
+ units: A string of the preferred units of measurement, "auto" is
+ default. Also us, ca, uk, si are available.
+ lazy: Defaults to false. The function will only request the json
+ data as it is needed. Results in more requests, but
+ probably a faster response time (I haven't checked)
+ """
+
+ if time is None:
+ url = 'https://api.darksky.net/forecast/%s/%s,%s' \
+ '?units=%s' % (key, lat, lng, units,)
+ else:
+ url_time = time.replace(microsecond=0).isoformat() # API returns 400 for microseconds
+ url = 'https://api.darksky.net/forecast/%s/%s,%s,%s' \
+ '?units=%s' % (key, lat, lng, url_time,
+ units,)
+
+ if lazy is True:
+ baseURL = "%s&exclude=%s" % (url,
+ 'minutely,currently,hourly,'
+ 'daily,alerts,flags')
+ else:
+ baseURL = url
+
+ return manual(baseURL, callback=callback)
+
+
+def manual(requestURL, callback=None):
+ """
+ This function is used by load_forecast OR by users to manually
+ construct the URL for an API call. 
+ """ + + if callback is None: + return get_forecast(requestURL) + else: + thread = threading.Thread(target=load_async, + args=(requestURL, callback)) + thread.start() + + +def get_forecast(requestURL): + forecastio_reponse = requests.get(requestURL) + forecastio_reponse.raise_for_status() + + json = forecastio_reponse.json() + headers = forecastio_reponse.headers + + return Forecast(json, forecastio_reponse, headers) + + +def load_async(url, callback): + callback(get_forecast(url)) diff --git a/deps/forecastio/models.py b/deps/forecastio/models.py new file mode 100644 index 00000000..ff89d4db --- /dev/null +++ b/deps/forecastio/models.py @@ -0,0 +1,129 @@ +from forecastio.utils import UnicodeMixin, PropertyUnavailable +import datetime +import requests + + +class Forecast(UnicodeMixin): + + def __init__(self, data, response, headers): + self.response = response + self.http_headers = headers + self.json = data + + self._alerts = [] + for alertJSON in self.json.get('alerts', []): + self._alerts.append(Alert(alertJSON)) + + def update(self): + r = requests.get(self.response.url) + self.json = r.json() + self.response = r + + def currently(self): + return self._forcastio_data('currently') + + def minutely(self): + return self._forcastio_data('minutely') + + def hourly(self): + return self._forcastio_data('hourly') + + def daily(self): + return self._forcastio_data('daily') + + def offset(self): + return self.json['offset'] + + def alerts(self): + return self._alerts + + def _forcastio_data(self, key): + keys = ['minutely', 'currently', 'hourly', 'daily'] + try: + if key not in self.json: + keys.remove(key) + url = "%s&exclude=%s%s" % (self.response.url.split('&')[0], + ','.join(keys), ',alerts,flags') + + response = requests.get(url).json() + self.json[key] = response[key] + + if key == 'currently': + return ForecastioDataPoint(self.json[key]) + else: + return ForecastioDataBlock(self.json[key]) + except: + if key == 'currently': + return ForecastioDataPoint() + else: 
+ return ForecastioDataBlock() + + +class ForecastioDataBlock(UnicodeMixin): + + def __init__(self, d=None): + d = d or {} + self.summary = d.get('summary') + self.icon = d.get('icon') + + self.data = [ForecastioDataPoint(datapoint) + for datapoint in d.get('data', [])] + + def __unicode__(self): + return '' % (self.summary, + len(self.data),) + + +class ForecastioDataPoint(UnicodeMixin): + + def __init__(self, d={}): + self.d = d + + try: + self.time = datetime.datetime.utcfromtimestamp(int(d['time'])) + self.utime = d['time'] + except: + pass + + try: + sr_time = int(d['sunriseTime']) + self.sunriseTime = datetime.datetime.utcfromtimestamp(sr_time) + except: + self.sunriseTime = None + + try: + ss_time = int(d['sunsetTime']) + self.sunsetTime = datetime.datetime.utcfromtimestamp(ss_time) + except: + self.sunsetTime = None + + def __getattr__(self, name): + try: + return self.d[name] + except KeyError: + raise PropertyUnavailable( + "Property '{}' is not valid" + " or is not available for this forecast".format(name) + ) + + def __unicode__(self): + return '' % (self.summary, self.time,) + + +class Alert(UnicodeMixin): + def __init__(self, json): + self.json = json + + def __getattr__(self, name): + try: + return self.json[name] + except KeyError: + raise PropertyUnavailable( + "Property '{}' is not valid" + " or is not available for this forecast".format(name) + ) + + def __unicode__(self): + return ''.format(self.title, self.time) diff --git a/deps/forecastio/utils.py b/deps/forecastio/utils.py new file mode 100644 index 00000000..d5d6972e --- /dev/null +++ b/deps/forecastio/utils.py @@ -0,0 +1,18 @@ +import sys + + +class UnicodeMixin(object): + + """Mixin class to handle defining the proper __str__/__unicode__ + methods in Python 2 or 3.""" + + if sys.version_info[0] >= 3: # Python 3 + def __str__(self): + return self.__unicode__() + else: # Python 2 + def __str__(self): + return self.__unicode__().encode('utf8') + + +class PropertyUnavailable(AttributeError): 
+ pass diff --git a/deps/google/protobuf/__init__.py b/deps/google/protobuf/__init__.py new file mode 100644 index 00000000..533821c1 --- /dev/null +++ b/deps/google/protobuf/__init__.py @@ -0,0 +1,33 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Copyright 2007 Google Inc. All Rights Reserved. 
+ +__version__ = '3.0.0b2' diff --git a/deps/google/protobuf/__pycache__/__init__.cpython-34.pyc b/deps/google/protobuf/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..8e9f2351 Binary files /dev/null and b/deps/google/protobuf/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/any_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/any_pb2.cpython-34.pyc new file mode 100644 index 00000000..722d857f Binary files /dev/null and b/deps/google/protobuf/__pycache__/any_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/api_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/api_pb2.cpython-34.pyc new file mode 100644 index 00000000..8479c47b Binary files /dev/null and b/deps/google/protobuf/__pycache__/api_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/descriptor.cpython-34.pyc b/deps/google/protobuf/__pycache__/descriptor.cpython-34.pyc new file mode 100644 index 00000000..41d70271 Binary files /dev/null and b/deps/google/protobuf/__pycache__/descriptor.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/descriptor_database.cpython-34.pyc b/deps/google/protobuf/__pycache__/descriptor_database.cpython-34.pyc new file mode 100644 index 00000000..ec0d4a0f Binary files /dev/null and b/deps/google/protobuf/__pycache__/descriptor_database.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/descriptor_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/descriptor_pb2.cpython-34.pyc new file mode 100644 index 00000000..08e7100a Binary files /dev/null and b/deps/google/protobuf/__pycache__/descriptor_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/descriptor_pool.cpython-34.pyc b/deps/google/protobuf/__pycache__/descriptor_pool.cpython-34.pyc new file mode 100644 index 00000000..2705fbfc Binary files /dev/null and b/deps/google/protobuf/__pycache__/descriptor_pool.cpython-34.pyc differ diff --git 
a/deps/google/protobuf/__pycache__/duration_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/duration_pb2.cpython-34.pyc new file mode 100644 index 00000000..5cb6a57f Binary files /dev/null and b/deps/google/protobuf/__pycache__/duration_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/empty_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/empty_pb2.cpython-34.pyc new file mode 100644 index 00000000..c3168beb Binary files /dev/null and b/deps/google/protobuf/__pycache__/empty_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/field_mask_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/field_mask_pb2.cpython-34.pyc new file mode 100644 index 00000000..b116da5a Binary files /dev/null and b/deps/google/protobuf/__pycache__/field_mask_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/json_format.cpython-34.pyc b/deps/google/protobuf/__pycache__/json_format.cpython-34.pyc new file mode 100644 index 00000000..8562d8e3 Binary files /dev/null and b/deps/google/protobuf/__pycache__/json_format.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/map_unittest_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/map_unittest_pb2.cpython-34.pyc new file mode 100644 index 00000000..b46659e2 Binary files /dev/null and b/deps/google/protobuf/__pycache__/map_unittest_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/message.cpython-34.pyc b/deps/google/protobuf/__pycache__/message.cpython-34.pyc new file mode 100644 index 00000000..5666af4f Binary files /dev/null and b/deps/google/protobuf/__pycache__/message.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/message_factory.cpython-34.pyc b/deps/google/protobuf/__pycache__/message_factory.cpython-34.pyc new file mode 100644 index 00000000..ca8322a9 Binary files /dev/null and b/deps/google/protobuf/__pycache__/message_factory.cpython-34.pyc differ diff --git 
a/deps/google/protobuf/__pycache__/proto_builder.cpython-34.pyc b/deps/google/protobuf/__pycache__/proto_builder.cpython-34.pyc new file mode 100644 index 00000000..e050d7ec Binary files /dev/null and b/deps/google/protobuf/__pycache__/proto_builder.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/reflection.cpython-34.pyc b/deps/google/protobuf/__pycache__/reflection.cpython-34.pyc new file mode 100644 index 00000000..40d4581d Binary files /dev/null and b/deps/google/protobuf/__pycache__/reflection.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/service.cpython-34.pyc b/deps/google/protobuf/__pycache__/service.cpython-34.pyc new file mode 100644 index 00000000..5da25d9f Binary files /dev/null and b/deps/google/protobuf/__pycache__/service.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/service_reflection.cpython-34.pyc b/deps/google/protobuf/__pycache__/service_reflection.cpython-34.pyc new file mode 100644 index 00000000..6f6acb5d Binary files /dev/null and b/deps/google/protobuf/__pycache__/service_reflection.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/source_context_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/source_context_pb2.cpython-34.pyc new file mode 100644 index 00000000..16b35e97 Binary files /dev/null and b/deps/google/protobuf/__pycache__/source_context_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/struct_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/struct_pb2.cpython-34.pyc new file mode 100644 index 00000000..5adaa727 Binary files /dev/null and b/deps/google/protobuf/__pycache__/struct_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/symbol_database.cpython-34.pyc b/deps/google/protobuf/__pycache__/symbol_database.cpython-34.pyc new file mode 100644 index 00000000..9668b8d5 Binary files /dev/null and b/deps/google/protobuf/__pycache__/symbol_database.cpython-34.pyc differ diff --git 
a/deps/google/protobuf/__pycache__/text_encoding.cpython-34.pyc b/deps/google/protobuf/__pycache__/text_encoding.cpython-34.pyc new file mode 100644 index 00000000..3232b218 Binary files /dev/null and b/deps/google/protobuf/__pycache__/text_encoding.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/text_format.cpython-34.pyc b/deps/google/protobuf/__pycache__/text_format.cpython-34.pyc new file mode 100644 index 00000000..6f99eae4 Binary files /dev/null and b/deps/google/protobuf/__pycache__/text_format.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/timestamp_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/timestamp_pb2.cpython-34.pyc new file mode 100644 index 00000000..db74ad2e Binary files /dev/null and b/deps/google/protobuf/__pycache__/timestamp_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/type_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/type_pb2.cpython-34.pyc new file mode 100644 index 00000000..e99837c3 Binary files /dev/null and b/deps/google/protobuf/__pycache__/type_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_arena_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_arena_pb2.cpython-34.pyc new file mode 100644 index 00000000..22f4e45b Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_arena_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_custom_options_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_custom_options_pb2.cpython-34.pyc new file mode 100644 index 00000000..13cdfcac Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_custom_options_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_import_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_import_pb2.cpython-34.pyc new file mode 100644 index 00000000..b9e27bbb Binary files /dev/null and 
b/deps/google/protobuf/__pycache__/unittest_import_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_import_public_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_import_public_pb2.cpython-34.pyc new file mode 100644 index 00000000..f314a20b Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_import_public_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_mset_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_mset_pb2.cpython-34.pyc new file mode 100644 index 00000000..6f88a496 Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_mset_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_mset_wire_format_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_mset_wire_format_pb2.cpython-34.pyc new file mode 100644 index 00000000..f2e2b0ec Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_mset_wire_format_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_no_arena_import_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_no_arena_import_pb2.cpython-34.pyc new file mode 100644 index 00000000..bb0c830d Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_no_arena_import_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_no_arena_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_no_arena_pb2.cpython-34.pyc new file mode 100644 index 00000000..4526b066 Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_no_arena_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_no_generic_services_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_no_generic_services_pb2.cpython-34.pyc new file mode 100644 index 00000000..13938d05 Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_no_generic_services_pb2.cpython-34.pyc differ 
diff --git a/deps/google/protobuf/__pycache__/unittest_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_pb2.cpython-34.pyc new file mode 100644 index 00000000..3efc97d3 Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/unittest_proto3_arena_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/unittest_proto3_arena_pb2.cpython-34.pyc new file mode 100644 index 00000000..383ce16b Binary files /dev/null and b/deps/google/protobuf/__pycache__/unittest_proto3_arena_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/__pycache__/wrappers_pb2.cpython-34.pyc b/deps/google/protobuf/__pycache__/wrappers_pb2.cpython-34.pyc new file mode 100644 index 00000000..2a7851ed Binary files /dev/null and b/deps/google/protobuf/__pycache__/wrappers_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/any_pb2.py b/deps/google/protobuf/any_pb2.py new file mode 100644 index 00000000..5c4b3f59 --- /dev/null +++ b/deps/google/protobuf/any_pb2.py @@ -0,0 +1,78 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/any.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/any.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42K\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_ANY = _descriptor.Descriptor( + name='Any', + full_name='google.protobuf.Any', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type_url', full_name='google.protobuf.Any.type_url', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.Any.value', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=46, + serialized_end=84, +) + +DESCRIPTOR.message_types_by_name['Any'] = 
_ANY + +Any = _reflection.GeneratedProtocolMessageType('Any', (_message.Message,), dict( + DESCRIPTOR = _ANY, + __module__ = 'google.protobuf.any_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Any) + )) +_sym_db.RegisterMessage(Any) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\010AnyProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/api_pb2.py b/deps/google/protobuf/api_pb2.py new file mode 100644 index 00000000..5a0bc1b6 --- /dev/null +++ b/deps/google/protobuf/api_pb2.py @@ -0,0 +1,250 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/api.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 +from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/api.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 
\x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBK\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + , + dependencies=[google_dot_protobuf_dot_source__context__pb2.DESCRIPTOR,google_dot_protobuf_dot_type__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_API = _descriptor.Descriptor( + name='Api', + full_name='google.protobuf.Api', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.Api.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='methods', full_name='google.protobuf.Api.methods', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.Api.options', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='version', full_name='google.protobuf.Api.version', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='source_context', full_name='google.protobuf.Api.source_context', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mixins', full_name='google.protobuf.Api.mixins', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.Api.syntax', index=6, + number=7, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=113, + serialized_end=370, +) + + +_METHOD = _descriptor.Descriptor( + name='Method', + full_name='google.protobuf.Method', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.Method.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='request_type_url', full_name='google.protobuf.Method.request_type_url', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='request_streaming', full_name='google.protobuf.Method.request_streaming', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='response_type_url', full_name='google.protobuf.Method.response_type_url', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='response_streaming', full_name='google.protobuf.Method.response_streaming', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.Method.options', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.Method.syntax', index=6, + number=7, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=373, + serialized_end=586, +) + + +_MIXIN = _descriptor.Descriptor( + name='Mixin', + full_name='google.protobuf.Mixin', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.Mixin.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='root', full_name='google.protobuf.Mixin.root', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=588, + serialized_end=623, +) + +_API.fields_by_name['methods'].message_type = _METHOD +_API.fields_by_name['options'].message_type = google_dot_protobuf_dot_type__pb2._OPTION +_API.fields_by_name['source_context'].message_type = google_dot_protobuf_dot_source__context__pb2._SOURCECONTEXT +_API.fields_by_name['mixins'].message_type = _MIXIN +_API.fields_by_name['syntax'].enum_type = google_dot_protobuf_dot_type__pb2._SYNTAX +_METHOD.fields_by_name['options'].message_type = google_dot_protobuf_dot_type__pb2._OPTION +_METHOD.fields_by_name['syntax'].enum_type = google_dot_protobuf_dot_type__pb2._SYNTAX +DESCRIPTOR.message_types_by_name['Api'] = _API +DESCRIPTOR.message_types_by_name['Method'] = _METHOD +DESCRIPTOR.message_types_by_name['Mixin'] = _MIXIN + +Api = _reflection.GeneratedProtocolMessageType('Api', 
(_message.Message,), dict( + DESCRIPTOR = _API, + __module__ = 'google.protobuf.api_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Api) + )) +_sym_db.RegisterMessage(Api) + +Method = _reflection.GeneratedProtocolMessageType('Method', (_message.Message,), dict( + DESCRIPTOR = _METHOD, + __module__ = 'google.protobuf.api_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Method) + )) +_sym_db.RegisterMessage(Method) + +Mixin = _reflection.GeneratedProtocolMessageType('Mixin', (_message.Message,), dict( + DESCRIPTOR = _MIXIN, + __module__ = 'google.protobuf.api_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Mixin) + )) +_sym_db.RegisterMessage(Mixin) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\010ApiProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/compiler/__init__.py b/deps/google/protobuf/compiler/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/deps/google/protobuf/compiler/__pycache__/__init__.cpython-34.pyc b/deps/google/protobuf/compiler/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..22198983 Binary files /dev/null and b/deps/google/protobuf/compiler/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-34.pyc b/deps/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-34.pyc new file mode 100644 index 00000000..b9f03c16 Binary files /dev/null and b/deps/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/compiler/plugin_pb2.py b/deps/google/protobuf/compiler/plugin_pb2.py new file mode 100644 index 00000000..e01b7a71 --- /dev/null +++ b/deps/google/protobuf/compiler/plugin_pb2.py @@ -0,0 +1,188 @@ +# Generated by the protocol buffer compiler. 
DO NOT EDIT! +# source: google/protobuf/compiler/plugin.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/compiler/plugin.proto', + package='google.protobuf.compiler', + syntax='proto2', + serialized_pb=_b('\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB7\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ\tplugin_go') + , + dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_CODEGENERATORREQUEST = _descriptor.Descriptor( + name='CodeGeneratorRequest', + full_name='google.protobuf.compiler.CodeGeneratorRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='file_to_generate', full_name='google.protobuf.compiler.CodeGeneratorRequest.file_to_generate', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='parameter', full_name='google.protobuf.compiler.CodeGeneratorRequest.parameter', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='proto_file', full_name='google.protobuf.compiler.CodeGeneratorRequest.proto_file', index=2, + number=15, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=101, + serialized_end=226, +) + + +_CODEGENERATORRESPONSE_FILE = _descriptor.Descriptor( + name='File', + full_name='google.protobuf.compiler.CodeGeneratorResponse.File', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='insertion_point', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='content', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.content', index=2, + number=15, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=337, + serialized_end=399, +) + +_CODEGENERATORRESPONSE = _descriptor.Descriptor( + name='CodeGeneratorResponse', + full_name='google.protobuf.compiler.CodeGeneratorResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='error', full_name='google.protobuf.compiler.CodeGeneratorResponse.error', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='file', full_name='google.protobuf.compiler.CodeGeneratorResponse.file', index=1, + number=15, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_CODEGENERATORRESPONSE_FILE, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=229, + serialized_end=399, +) + +_CODEGENERATORREQUEST.fields_by_name['proto_file'].message_type = google_dot_protobuf_dot_descriptor__pb2._FILEDESCRIPTORPROTO +_CODEGENERATORRESPONSE_FILE.containing_type = _CODEGENERATORRESPONSE +_CODEGENERATORRESPONSE.fields_by_name['file'].message_type = _CODEGENERATORRESPONSE_FILE 
+DESCRIPTOR.message_types_by_name['CodeGeneratorRequest'] = _CODEGENERATORREQUEST +DESCRIPTOR.message_types_by_name['CodeGeneratorResponse'] = _CODEGENERATORRESPONSE + +CodeGeneratorRequest = _reflection.GeneratedProtocolMessageType('CodeGeneratorRequest', (_message.Message,), dict( + DESCRIPTOR = _CODEGENERATORREQUEST, + __module__ = 'google.protobuf.compiler.plugin_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest) + )) +_sym_db.RegisterMessage(CodeGeneratorRequest) + +CodeGeneratorResponse = _reflection.GeneratedProtocolMessageType('CodeGeneratorResponse', (_message.Message,), dict( + + File = _reflection.GeneratedProtocolMessageType('File', (_message.Message,), dict( + DESCRIPTOR = _CODEGENERATORRESPONSE_FILE, + __module__ = 'google.protobuf.compiler.plugin_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File) + )) + , + DESCRIPTOR = _CODEGENERATORRESPONSE, + __module__ = 'google.protobuf.compiler.plugin_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse) + )) +_sym_db.RegisterMessage(CodeGeneratorResponse) +_sym_db.RegisterMessage(CodeGeneratorResponse.File) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.protobuf.compilerB\014PluginProtosZ\tplugin_go')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/descriptor.py b/deps/google/protobuf/descriptor.py new file mode 100644 index 00000000..5f613c88 --- /dev/null +++ b/deps/google/protobuf/descriptor.py @@ -0,0 +1,966 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Descriptors essentially contain exactly the information found in a .proto +file, in types that make this information accessible in Python. 
+""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import six + +from google.protobuf.internal import api_implementation + +_USE_C_DESCRIPTORS = False +if api_implementation.Type() == 'cpp': + # Used by MakeDescriptor in cpp mode + import os + import uuid + from google.protobuf.pyext import _message + _USE_C_DESCRIPTORS = getattr(_message, '_USE_C_DESCRIPTORS', False) + + +class Error(Exception): + """Base error for this module.""" + + +class TypeTransformationError(Error): + """Error transforming between python proto type and corresponding C++ type.""" + + +if _USE_C_DESCRIPTORS: + # This metaclass allows to override the behavior of code like + # isinstance(my_descriptor, FieldDescriptor) + # and make it return True when the descriptor is an instance of the extension + # type written in C++. + class DescriptorMetaclass(type): + def __instancecheck__(cls, obj): + if super(DescriptorMetaclass, cls).__instancecheck__(obj): + return True + if isinstance(obj, cls._C_DESCRIPTOR_CLASS): + return True + return False +else: + # The standard metaclass; nothing changes. + DescriptorMetaclass = type + + +class DescriptorBase(six.with_metaclass(DescriptorMetaclass)): + + """Descriptors base class. + + This class is the base of all descriptor classes. It provides common options + related functionality. + + Attributes: + has_options: True if the descriptor has non-default options. Usually it + is not necessary to read this -- just call GetOptions() which will + happily return the default instance. However, it's sometimes useful + for efficiency, and also useful inside the protobuf implementation to + avoid some bootstrapping issues. + """ + + if _USE_C_DESCRIPTORS: + # The class, or tuple of classes, that are considered as "virtual + # subclasses" of this descriptor class. + _C_DESCRIPTOR_CLASS = () + + def __init__(self, options, options_class_name): + """Initialize the descriptor given its options message and the name of the + class of the options message. 
The name of the class is required in case + the options message is None and has to be created. + """ + self._options = options + self._options_class_name = options_class_name + + # Does this descriptor have non-default options? + self.has_options = options is not None + + def _SetOptions(self, options, options_class_name): + """Sets the descriptor's options + + This function is used in generated proto2 files to update descriptor + options. It must not be used outside proto2. + """ + self._options = options + self._options_class_name = options_class_name + + # Does this descriptor have non-default options? + self.has_options = options is not None + + def GetOptions(self): + """Retrieves descriptor options. + + This method returns the options set or creates the default options for the + descriptor. + """ + if self._options: + return self._options + from google.protobuf import descriptor_pb2 + try: + options_class = getattr(descriptor_pb2, self._options_class_name) + except AttributeError: + raise RuntimeError('Unknown options class name %s!' % + (self._options_class_name)) + self._options = options_class() + return self._options + + +class _NestedDescriptorBase(DescriptorBase): + """Common class for descriptors that can be nested.""" + + def __init__(self, options, options_class_name, name, full_name, + file, containing_type, serialized_start=None, + serialized_end=None): + """Constructor. + + Args: + options: Protocol message options or None + to use default message options. + options_class_name: (str) The class name of the above options. + + name: (str) Name of this protocol message type. + full_name: (str) Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + file: (FileDescriptor) Reference to file info. + containing_type: if provided, this is a nested descriptor, with this + descriptor as parent, otherwise None. 
+ serialized_start: The start index (inclusive) in block in the + file.serialized_pb that describes this descriptor. + serialized_end: The end index (exclusive) in block in the + file.serialized_pb that describes this descriptor. + """ + super(_NestedDescriptorBase, self).__init__( + options, options_class_name) + + self.name = name + # TODO(falk): Add function to calculate full_name instead of having it in + # memory? + self.full_name = full_name + self.file = file + self.containing_type = containing_type + + self._serialized_start = serialized_start + self._serialized_end = serialized_end + + def GetTopLevelContainingType(self): + """Returns the root if this is a nested type, or itself if its the root.""" + desc = self + while desc.containing_type is not None: + desc = desc.containing_type + return desc + + def CopyToProto(self, proto): + """Copies this to the matching proto in descriptor_pb2. + + Args: + proto: An empty proto instance from descriptor_pb2. + + Raises: + Error: If self couldnt be serialized, due to to few constructor arguments. + """ + if (self.file is not None and + self._serialized_start is not None and + self._serialized_end is not None): + proto.ParseFromString(self.file.serialized_pb[ + self._serialized_start:self._serialized_end]) + else: + raise Error('Descriptor does not contain serialization.') + + +class Descriptor(_NestedDescriptorBase): + + """Descriptor for a protocol message type. + + A Descriptor instance has the following attributes: + + name: (str) Name of this protocol message type. + full_name: (str) Fully-qualified name of this protocol message type, + which will include protocol "package" name and the name of any + enclosing types. + + containing_type: (Descriptor) Reference to the descriptor of the + type containing us, or None if this is top-level. + + fields: (list of FieldDescriptors) Field descriptors for all + fields in this type. 
+ fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor + objects as in |fields|, but indexed by "number" attribute in each + FieldDescriptor. + fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor + objects as in |fields|, but indexed by "name" attribute in each + FieldDescriptor. + fields_by_camelcase_name: (dict str -> FieldDescriptor) Same + FieldDescriptor objects as in |fields|, but indexed by + "camelcase_name" attribute in each FieldDescriptor. + + nested_types: (list of Descriptors) Descriptor references + for all protocol message types nested within this one. + nested_types_by_name: (dict str -> Descriptor) Same Descriptor + objects as in |nested_types|, but indexed by "name" attribute + in each Descriptor. + + enum_types: (list of EnumDescriptors) EnumDescriptor references + for all enums contained within this type. + enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor + objects as in |enum_types|, but indexed by "name" attribute + in each EnumDescriptor. + enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping + from enum value name to EnumValueDescriptor for that value. + + extensions: (list of FieldDescriptor) All extensions defined directly + within this message type (NOT within a nested type). + extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor + objects as |extensions|, but indexed by "name" attribute of each + FieldDescriptor. + + is_extendable: Does this type define any extension ranges? + + oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields + in this message. + oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|, + but indexed by "name" attribute. + + file: (FileDescriptor) Reference to file descriptor. 
+ """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.Descriptor + + def __new__(cls, name, full_name, filename, containing_type, fields, + nested_types, enum_types, extensions, options=None, + is_extendable=True, extension_ranges=None, oneofs=None, + file=None, serialized_start=None, serialized_end=None, + syntax=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindMessageTypeByName(full_name) + + # NOTE(tmarek): The file argument redefining a builtin is nothing we can + # fix right now since we don't know how many clients already rely on the + # name of the argument. + def __init__(self, name, full_name, filename, containing_type, fields, + nested_types, enum_types, extensions, options=None, + is_extendable=True, extension_ranges=None, oneofs=None, + file=None, serialized_start=None, serialized_end=None, + syntax=None): # pylint:disable=redefined-builtin + """Arguments to __init__() are as described in the description + of Descriptor fields above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. + """ + super(Descriptor, self).__init__( + options, 'MessageOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end) + + # We have fields in addition to fields_by_name and fields_by_number, + # so that: + # 1. Clients can index fields by "order in which they're listed." + # 2. Clients can easily iterate over all fields with the terse + # syntax: for f in descriptor.fields: ... 
+ self.fields = fields + for field in self.fields: + field.containing_type = self + self.fields_by_number = dict((f.number, f) for f in fields) + self.fields_by_name = dict((f.name, f) for f in fields) + self._fields_by_camelcase_name = None + + self.nested_types = nested_types + for nested_type in nested_types: + nested_type.containing_type = self + self.nested_types_by_name = dict((t.name, t) for t in nested_types) + + self.enum_types = enum_types + for enum_type in self.enum_types: + enum_type.containing_type = self + self.enum_types_by_name = dict((t.name, t) for t in enum_types) + self.enum_values_by_name = dict( + (v.name, v) for t in enum_types for v in t.values) + + self.extensions = extensions + for extension in self.extensions: + extension.extension_scope = self + self.extensions_by_name = dict((f.name, f) for f in extensions) + self.is_extendable = is_extendable + self.extension_ranges = extension_ranges + self.oneofs = oneofs if oneofs is not None else [] + self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) + for oneof in self.oneofs: + oneof.containing_type = self + self.syntax = syntax or "proto2" + + @property + def fields_by_camelcase_name(self): + if self._fields_by_camelcase_name is None: + self._fields_by_camelcase_name = dict( + (f.camelcase_name, f) for f in self.fields) + return self._fields_by_camelcase_name + + def EnumValueName(self, enum, value): + """Returns the string name of an enum value. + + This is just a small helper method to simplify a common operation. + + Args: + enum: string name of the Enum. + value: int, value of the enum. + + Returns: + string name of the enum value. + + Raises: + KeyError if either the Enum doesn't exist or the value is not a valid + value for the enum. + """ + return self.enum_types_by_name[enum].values_by_number[value].name + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.DescriptorProto. + + Args: + proto: An empty descriptor_pb2.DescriptorProto. 
+ """ + # This function is overriden to give a better doc comment. + super(Descriptor, self).CopyToProto(proto) + + +# TODO(robinson): We should have aggressive checking here, +# for example: +# * If you specify a repeated field, you should not be allowed +# to specify a default value. +# * [Other examples here as needed]. +# +# TODO(robinson): for this and other *Descriptor classes, we +# might also want to lock things down aggressively (e.g., +# prevent clients from setting the attributes). Having +# stronger invariants here in general will reduce the number +# of runtime checks we must do in reflection.py... +class FieldDescriptor(DescriptorBase): + + """Descriptor for a single field in a .proto file. + + A FieldDescriptor instance has the following attributes: + + name: (str) Name of this field, exactly as it appears in .proto. + full_name: (str) Name of this field, including containing scope. This is + particularly relevant for extensions. + camelcase_name: (str) Camelcase name of this field. + index: (int) Dense, 0-indexed index giving the order that this + field textually appears within its message in the .proto file. + number: (int) Tag number declared for this field in the .proto file. + + type: (One of the TYPE_* constants below) Declared type. + cpp_type: (One of the CPPTYPE_* constants below) C++ type used to + represent this field. + + label: (One of the LABEL_* constants below) Tells whether this + field is optional, required, or repeated. + has_default_value: (bool) True if this field has a default value defined, + otherwise false. + default_value: (Varies) Default value of this field. Only + meaningful for non-repeated scalar fields. Repeated fields + should always set this to [], and non-repeated composite + fields should always set this to None. + + containing_type: (Descriptor) Descriptor of the protocol message + type that contains this field. Set by the Descriptor constructor + if we're passed into one. 
+ Somewhat confusingly, for extension fields, this is the + descriptor of the EXTENDED message, not the descriptor + of the message containing this field. (See is_extension and + extension_scope below). + message_type: (Descriptor) If a composite field, a descriptor + of the message type contained in this field. Otherwise, this is None. + enum_type: (EnumDescriptor) If this field contains an enum, a + descriptor of that enum. Otherwise, this is None. + + is_extension: True iff this describes an extension field. + extension_scope: (Descriptor) Only meaningful if is_extension is True. + Gives the message that immediately contains this extension field. + Will be None iff we're a top-level (file-level) extension field. + + options: (descriptor_pb2.FieldOptions) Protocol message field options or + None to use default field options. + + containing_oneof: (OneofDescriptor) If the field is a member of a oneof + union, contains its descriptor. Otherwise, None. + """ + + # Must be consistent with C++ FieldDescriptor::Type enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + TYPE_DOUBLE = 1 + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + MAX_TYPE = 18 + + # Must be consistent with C++ FieldDescriptor::CppType enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. 
+ CPPTYPE_INT32 = 1 + CPPTYPE_INT64 = 2 + CPPTYPE_UINT32 = 3 + CPPTYPE_UINT64 = 4 + CPPTYPE_DOUBLE = 5 + CPPTYPE_FLOAT = 6 + CPPTYPE_BOOL = 7 + CPPTYPE_ENUM = 8 + CPPTYPE_STRING = 9 + CPPTYPE_MESSAGE = 10 + MAX_CPPTYPE = 10 + + _PYTHON_TO_CPP_PROTO_TYPE_MAP = { + TYPE_DOUBLE: CPPTYPE_DOUBLE, + TYPE_FLOAT: CPPTYPE_FLOAT, + TYPE_ENUM: CPPTYPE_ENUM, + TYPE_INT64: CPPTYPE_INT64, + TYPE_SINT64: CPPTYPE_INT64, + TYPE_SFIXED64: CPPTYPE_INT64, + TYPE_UINT64: CPPTYPE_UINT64, + TYPE_FIXED64: CPPTYPE_UINT64, + TYPE_INT32: CPPTYPE_INT32, + TYPE_SFIXED32: CPPTYPE_INT32, + TYPE_SINT32: CPPTYPE_INT32, + TYPE_UINT32: CPPTYPE_UINT32, + TYPE_FIXED32: CPPTYPE_UINT32, + TYPE_BYTES: CPPTYPE_STRING, + TYPE_STRING: CPPTYPE_STRING, + TYPE_BOOL: CPPTYPE_BOOL, + TYPE_MESSAGE: CPPTYPE_MESSAGE, + TYPE_GROUP: CPPTYPE_MESSAGE + } + + # Must be consistent with C++ FieldDescriptor::Label enum in + # descriptor.h. + # + # TODO(robinson): Find a way to eliminate this repetition. + LABEL_OPTIONAL = 1 + LABEL_REQUIRED = 2 + LABEL_REPEATED = 3 + MAX_LABEL = 3 + + # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, + # and kLastReservedNumber in descriptor.h + MAX_FIELD_NUMBER = (1 << 29) - 1 + FIRST_RESERVED_FIELD_NUMBER = 19000 + LAST_RESERVED_FIELD_NUMBER = 19999 + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.FieldDescriptor + + def __new__(cls, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + has_default_value=True, containing_oneof=None): + _message.Message._CheckCalledFromGeneratedFile() + if is_extension: + return _message.default_pool.FindExtensionByName(full_name) + else: + return _message.default_pool.FindFieldByName(full_name) + + def __init__(self, name, full_name, index, number, type, cpp_type, label, + default_value, message_type, enum_type, containing_type, + is_extension, extension_scope, options=None, + has_default_value=True, 
containing_oneof=None): + """The arguments are as described in the description of FieldDescriptor + attributes above. + + Note that containing_type may be None, and may be set later if necessary + (to deal with circular references between message types, for example). + Likewise for extension_scope. + """ + super(FieldDescriptor, self).__init__(options, 'FieldOptions') + self.name = name + self.full_name = full_name + self._camelcase_name = None + self.index = index + self.number = number + self.type = type + self.cpp_type = cpp_type + self.label = label + self.has_default_value = has_default_value + self.default_value = default_value + self.containing_type = containing_type + self.message_type = message_type + self.enum_type = enum_type + self.is_extension = is_extension + self.extension_scope = extension_scope + self.containing_oneof = containing_oneof + if api_implementation.Type() == 'cpp': + if is_extension: + self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) + else: + self._cdescriptor = _message.default_pool.FindFieldByName(full_name) + else: + self._cdescriptor = None + + @property + def camelcase_name(self): + if self._camelcase_name is None: + self._camelcase_name = _ToCamelCase(self.name) + return self._camelcase_name + + @staticmethod + def ProtoTypeToCppProtoType(proto_type): + """Converts from a Python proto type to a C++ Proto Type. + + The Python ProtocolBuffer classes specify both the 'Python' datatype and the + 'C++' datatype - and they're not the same. This helper method should + translate from one to another. + + Args: + proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) + Returns: + descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. + Raises: + TypeTransformationError: when the Python proto type isn't known. 
+ """ + try: + return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] + except KeyError: + raise TypeTransformationError('Unknown proto_type: %s' % proto_type) + + +class EnumDescriptor(_NestedDescriptorBase): + + """Descriptor for an enum defined in a .proto file. + + An EnumDescriptor instance has the following attributes: + + name: (str) Name of the enum type. + full_name: (str) Full name of the type, including package name + and any enclosing type(s). + + values: (list of EnumValueDescriptors) List of the values + in this enum. + values_by_name: (dict str -> EnumValueDescriptor) Same as |values|, + but indexed by the "name" field of each EnumValueDescriptor. + values_by_number: (dict int -> EnumValueDescriptor) Same as |values|, + but indexed by the "number" field of each EnumValueDescriptor. + containing_type: (Descriptor) Descriptor of the immediate containing + type of this enum, or None if this is an enum defined at the + top level in a .proto file. Set by Descriptor's constructor + if we're passed into one. + file: (FileDescriptor) Reference to file descriptor. + options: (descriptor_pb2.EnumOptions) Enum options message or + None to use default enum options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumDescriptor + + def __new__(cls, name, full_name, filename, values, + containing_type=None, options=None, file=None, + serialized_start=None, serialized_end=None): + _message.Message._CheckCalledFromGeneratedFile() + return _message.default_pool.FindEnumTypeByName(full_name) + + def __init__(self, name, full_name, filename, values, + containing_type=None, options=None, file=None, + serialized_start=None, serialized_end=None): + """Arguments are as described in the attribute description above. + + Note that filename is an obsolete argument, that is not used anymore. + Please use file.name to access this as an attribute. 
+ """ + super(EnumDescriptor, self).__init__( + options, 'EnumOptions', name, full_name, file, + containing_type, serialized_start=serialized_start, + serialized_end=serialized_end) + + self.values = values + for value in self.values: + value.type = self + self.values_by_name = dict((v.name, v) for v in values) + self.values_by_number = dict((v.number, v) for v in values) + + def CopyToProto(self, proto): + """Copies this to a descriptor_pb2.EnumDescriptorProto. + + Args: + proto: An empty descriptor_pb2.EnumDescriptorProto. + """ + # This function is overriden to give a better doc comment. + super(EnumDescriptor, self).CopyToProto(proto) + + +class EnumValueDescriptor(DescriptorBase): + + """Descriptor for a single value within an enum. + + name: (str) Name of this value. + index: (int) Dense, 0-indexed index giving the order that this + value appears textually within its enum in the .proto file. + number: (int) Actual number assigned to this enum value. + type: (EnumDescriptor) EnumDescriptor to which this value + belongs. Set by EnumDescriptor's constructor if we're + passed into one. + options: (descriptor_pb2.EnumValueOptions) Enum value options message or + None to use default enum value options options. + """ + + if _USE_C_DESCRIPTORS: + _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor + + def __new__(cls, name, index, number, type=None, options=None): + _message.Message._CheckCalledFromGeneratedFile() + # There is no way we can build a complete EnumValueDescriptor with the + # given parameters (the name of the Enum is not known, for example). + # Fortunately generated files just pass it to the EnumDescriptor() + # constructor, which will ignore it, so returning None is good enough. 
    # NOTE(review): this `return None` is the tail of EnumValueDescriptor.__new__,
    # whose `def` line is above this view — under C descriptors there is no way to
    # build a standalone EnumValueDescriptor here, so __new__ yields None and the
    # enclosing EnumDescriptor supplies the real value descriptors.
    return None

  def __init__(self, name, index, number, type=None, options=None):
    """Arguments are as described in the attribute description above."""
    super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
    self.name = name
    self.index = index
    self.number = number
    self.type = type


class OneofDescriptor(object):
  """Descriptor for a oneof field.

  name: (str) Name of the oneof field.
  full_name: (str) Full name of the oneof field, including package name.
  index: (int) 0-based index giving the order of the oneof field inside
    its containing type.
  containing_type: (Descriptor) Descriptor of the protocol message
    type that contains this field.  Set by the Descriptor constructor
    if we're passed into one.
  fields: (list of FieldDescriptor) The list of field descriptors this
    oneof can contain.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.OneofDescriptor

    # With C++ descriptors the Python constructor is bypassed entirely: the
    # oneof must already exist in the default pool (registered by generated
    # code), and __new__ returns that existing C-backed descriptor.
    def __new__(cls, name, full_name, index, containing_type, fields):
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindOneofByName(full_name)

  def __init__(self, name, full_name, index, containing_type, fields):
    """Arguments are as described in the attribute description above."""
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_type = containing_type
    self.fields = fields


class ServiceDescriptor(_NestedDescriptorBase):

  """Descriptor for a service.

  name: (str) Name of the service.
  full_name: (str) Full name of the service, including package name.
  index: (int) 0-indexed index giving the order that this service's
    definition appears within the .proto file.
  methods: (list of MethodDescriptor) List of methods provided by this
    service.
  options: (descriptor_pb2.ServiceOptions) Service options message or
    None to use default service options.
  file: (FileDescriptor) Reference to file info.
  """

  def __init__(self, name, full_name, index, methods, options=None, file=None,
               serialized_start=None, serialized_end=None):
    super(ServiceDescriptor, self).__init__(
        options, 'ServiceOptions', name, full_name, file,
        None, serialized_start=serialized_start,
        serialized_end=serialized_end)
    self.index = index
    self.methods = methods
    # Set the containing service for each method in this service.
    for method in self.methods:
      method.containing_service = self

  def FindMethodByName(self, name):
    """Searches for the specified method, and returns its descriptor.

    Returns None when no method with that name exists (linear scan).
    """
    for method in self.methods:
      if name == method.name:
        return method
    return None

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.ServiceDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.ServiceDescriptorProto.
    """
    # This function is overridden to give a better doc comment.
    super(ServiceDescriptor, self).CopyToProto(proto)


class MethodDescriptor(DescriptorBase):

  """Descriptor for a method in a service.

  name: (str) Name of the method within the service.
  full_name: (str) Full name of method.
  index: (int) 0-indexed index of the method inside the service.
  containing_service: (ServiceDescriptor) The service that contains this
    method.
  input_type: The descriptor of the message that this method accepts.
  output_type: The descriptor of the message that this method returns.
  options: (descriptor_pb2.MethodOptions) Method options message or
    None to use default method options.
  """

  def __init__(self, name, full_name, index, containing_service,
               input_type, output_type, options=None):
    """The arguments are as described in the description of MethodDescriptor
    attributes above.

    Note that containing_service may be None, and may be set later if necessary
    (ServiceDescriptor.__init__ assigns it for each of its methods).
    """
    super(MethodDescriptor, self).__init__(options, 'MethodOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_service = containing_service
    self.input_type = input_type
    self.output_type = output_type


class FileDescriptor(DescriptorBase):
  """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.

  Note that enum_types_by_name, extensions_by_name, and dependencies
  fields are only set by the message_factory module, and not by the
  generated proto code.

  name: name of file, relative to root of source tree.
  package: name of the package
  syntax: string indicating syntax of the file (can be "proto2" or "proto3")
  serialized_pb: (str) Byte string of serialized
    descriptor_pb2.FileDescriptorProto.
  dependencies: List of other FileDescriptors this FileDescriptor depends on.
  message_types_by_name: Dict of message names of their descriptors.
  enum_types_by_name: Dict of enum names and their descriptors.
  extensions_by_name: Dict of extension names and their descriptors.
  pool: the DescriptorPool this descriptor belongs to.  When not passed to the
    constructor, the global default pool is used.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.FileDescriptor

    def __new__(cls, name, package, options=None, serialized_pb=None,
                dependencies=None, syntax=None, pool=None):
      # FileDescriptor() is called from various places, not only from generated
      # files, to register dynamic proto files and messages.
      if serialized_pb:
        # TODO(amauryfa): use the pool passed as argument. This will work only
        # for C++-implemented DescriptorPools.
        return _message.default_pool.AddSerializedFile(serialized_pb)
      else:
        return super(FileDescriptor, cls).__new__(cls)

  def __init__(self, name, package, options=None, serialized_pb=None,
               dependencies=None, syntax=None, pool=None):
    """Constructor."""
    super(FileDescriptor, self).__init__(options, 'FileOptions')

    if pool is None:
      # Imported lazily to avoid a circular import at module load time.
      from google.protobuf import descriptor_pool
      pool = descriptor_pool.Default()
    self.pool = pool
    self.message_types_by_name = {}
    self.name = name
    self.package = package
    self.syntax = syntax or "proto2"
    self.serialized_pb = serialized_pb

    self.enum_types_by_name = {}
    self.extensions_by_name = {}
    self.dependencies = (dependencies or [])

    # Keep the C++ descriptor pool in sync when running on the cpp
    # implementation and a serialized file was supplied.
    if (api_implementation.Type() == 'cpp' and
        self.serialized_pb is not None):
      _message.default_pool.AddSerializedFile(self.serialized_pb)

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.FileDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.FileDescriptorProto.
    """
    # NOTE(review): requires serialized_pb to have been provided at
    # construction; raises TypeError from ParseFromString when it is None.
    proto.ParseFromString(self.serialized_pb)


def _ParseOptions(message, string):
  """Parses serialized options.

  This helper function is used to parse serialized options in generated
  proto2 files. It must not be used outside proto2.
  """
  message.ParseFromString(string)
  return message


def _ToCamelCase(name):
  """Converts name to camel-case and returns it.

  Underscores are dropped and the following character is upper-cased;
  leading underscores are discarded without capitalizing (the `if result:`
  guard), and the very first letter is forced to lower case.
  """
  capitalize_next = False
  result = []

  for c in name:
    if c == '_':
      if result:
        capitalize_next = True
    elif capitalize_next:
      result.append(c.upper())
      capitalize_next = False
    else:
      result += c

  # Lower-case the first letter.
  if result and result[0].isupper():
    result[0] = result[0].lower()
  return ''.join(result)


def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
                   syntax=None):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of defining
  a message inside of another message. Composite fields can currently only be
  resolved if the message is defined in the same scope as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches.
      Set to False on recursion, so no duplicates are created.
    syntax: The syntax/semantics that should be used.  Set to "proto3" to get
      proto3 field presence semantics.
  Returns:
    A Descriptor for protobuf messages.
  """
  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    from google.protobuf import descriptor_pb2
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    # Generate a random name for this proto file to prevent conflicts with any
    # imported ones. We need to specify a file name so the descriptor pool
    # accepts our FileDescriptorProto, but it is not important what that file
    # name is actually set to.
    proto_name = str(uuid.uuid4())

    if package:
      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
                                                proto_name + '.proto')
      file_descriptor_proto.package = package
    else:
      file_descriptor_proto.name = proto_name + '.proto'

    _message.default_pool.Add(file_descriptor_proto)
    result = _message.default_pool.FindFileByName(file_descriptor_proto.name)

    if _USE_C_DESCRIPTORS:
      return result.message_types_by_name[desc_proto.name]

  full_message_name = [desc_proto.name]
  if package: full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
      enum_proto.name, full_name, None, [
          EnumValueDescriptor(enum_val.name, ii, enum_val.number)
          for ii, enum_val in enumerate(enum_proto.value)])
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(nested_proto,
                                 package='.'.join(full_message_name),
                                 build_file_if_cpp=False,
                                 syntax=syntax)
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.HasField('type_name'):
      type_name = field_proto.type_name
      # Only the last path component of the type name is used: composite
      # fields resolve solely against the sibling nested/enum types built
      # above (see the docstring's scope limitation).
      full_type_name = '.'.join(full_message_name +
                                [type_name[type_name.rfind('.')+1:]])
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
      # Else type_name references a non-local type, which isn't implemented
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, nested_desc, enum_desc, None, False, None,
        options=field_proto.options, has_default_value=False)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    list(nested_types.values()), list(enum_types.values()), [],
                    options=desc_proto.options)
diff --git a/deps/google/protobuf/descriptor_database.py b/deps/google/protobuf/descriptor_database.py
new file mode 100644
index 00000000..1333f996
--- /dev/null
+++ b/deps/google/protobuf/descriptor_database.py
@@ -0,0 +1,141 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc.  All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Provides a container for DescriptorProtos."""

__author__ = 'matthewtoia@google.com (Matt Toia)'


class Error(Exception):
  """Base error for this module."""
  pass


class DescriptorDatabaseConflictingDefinitionError(Error):
  """Raised when a proto is added with the same name & different descriptor."""


class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    # Maps file name -> FileDescriptorProto.
    self._file_desc_protos_by_file = {}
    # Maps fully qualified symbol name -> FileDescriptorProto defining it.
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    if proto_name not in self._file_desc_protos_by_file:
      self._file_desc_protos_by_file[proto_name] = file_desc_proto
    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
      raise DescriptorDatabaseConflictingDefinitionError(
          '%s already added, but with different descriptor.' % proto_name)

    # Add the top-level Message, Enum and Extension descriptors to the index.
    package = file_desc_proto.package
    for message in file_desc_proto.message_type:
      # _ExtractSymbols also yields nested message names, so nested types
      # are indexed too.
      self._file_desc_protos_by_symbol.update(
          (name, file_desc_proto) for name in _ExtractSymbols(message, package))
    for enum in file_desc_proto.enum_type:
      self._file_desc_protos_by_symbol[
          '.'.join((package, enum.name))] = file_desc_proto
    for extension in file_desc_proto.extension:
      self._file_desc_protos_by_symbol[
          '.'.join((package, extension.name))] = file_desc_proto

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending to a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """

    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file descriptor's
    package and any containing messages. Some examples:

    'some.package.name.Message'
    'some.package.name.Message.NestedEnum'

    The file descriptor proto containing the specified symbol must be added to
    this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """

    return self._file_desc_protos_by_symbol[symbol]


def _ExtractSymbols(desc_proto, package):
  """Pulls out all the symbols from a descriptor proto.

  Recurses into nested message types; enum names are yielded but nested
  enums are not recursed into further (they contain no nested types).

  Args:
    desc_proto: The proto to extract symbols from.
    package: The package containing the descriptor type.

  Yields:
    The fully qualified name found in the descriptor.
  """

  message_name = '.'.join((package, desc_proto.name))
  yield message_name
  for nested_type in desc_proto.nested_type:
    for symbol in _ExtractSymbols(nested_type, message_name):
      yield symbol
  for enum_type in desc_proto.enum_type:
    yield '.'.join((message_name, enum_type.name))
diff --git a/deps/google/protobuf/descriptor_pb2.py b/deps/google/protobuf/descriptor_pb2.py
new file mode 100644
index 00000000..a2f14b9c
--- /dev/null
+++ b/deps/google/protobuf/descriptor_pb2.py
@@ -0,0 +1,1704 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/descriptor.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/descriptor.proto', + package='google.protobuf', + syntax='proto2', + serialized_pb=_b('\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xf0\x04\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 
\x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a,\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"\xbc\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"$\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 
\x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xaa\x05\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12,\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08:\x05\x66\x61lse\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x05\x66\x61lse\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\'\n\x1fjavanano_use_deprecated_package\x18& \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xe6\x01\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x98\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x8d\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"z\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\tBX\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z\ndescriptor\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.protobuf.FieldDescriptorProto.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_DOUBLE', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_FLOAT', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_INT64', index=2, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT64', 
index=3, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_INT32', index=4, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED64', index=5, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED32', index=6, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_BOOL', index=7, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_STRING', index=8, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_GROUP', index=9, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_MESSAGE', index=10, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_BYTES', index=11, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT32', index=12, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_ENUM', index=13, number=14, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED32', index=14, number=15, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED64', index=15, number=16, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT32', index=16, number=17, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT64', index=17, number=18, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1553, + serialized_end=1863, +) +_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) + +_FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.protobuf.FieldDescriptorProto.Label', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='LABEL_OPTIONAL', index=0, number=1, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='LABEL_REQUIRED', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LABEL_REPEATED', index=2, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1865, + serialized_end=1932, +) +_sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) + +_FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( + name='OptimizeMode', + full_name='google.protobuf.FileOptions.OptimizeMode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SPEED', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CODE_SIZE', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LITE_RUNTIME', index=2, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3182, + serialized_end=3240, +) +_sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) + +_FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( + name='CType', + full_name='google.protobuf.FieldOptions.CType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STRING', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CORD', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STRING_PIECE', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3782, + serialized_end=3829, +) +_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) + +_FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( + name='JSType', + full_name='google.protobuf.FieldOptions.JSType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='JS_NORMAL', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JS_STRING', index=1, number=1, + 
options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JS_NUMBER', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3831, + serialized_end=3884, +) +_sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) + + +_FILEDESCRIPTORSET = _descriptor.Descriptor( + name='FileDescriptorSet', + full_name='google.protobuf.FileDescriptorSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=53, + serialized_end=124, +) + + +_FILEDESCRIPTORPROTO = _descriptor.Descriptor( + name='FileDescriptorProto', + full_name='google.protobuf.FileDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', 
index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, + number=10, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, + number=11, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, + 
number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=127, + serialized_end=602, +) + + +_DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( + name='ExtensionRange', + full_name='google.protobuf.DescriptorProto.ExtensionRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1140, + serialized_end=1184, +) + +_DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( + name='ReservedRange', + full_name='google.protobuf.DescriptorProto.ReservedRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1186, + serialized_end=1229, +) + +_DESCRIPTORPROTO = _descriptor.Descriptor( + name='DescriptorProto', + full_name='google.protobuf.DescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.DescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='field', full_name='google.protobuf.DescriptorProto.field', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.DescriptorProto.options', index=7, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, + number=10, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=605, + serialized_end=1229, +) + + +_FIELDDESCRIPTORPROTO = _descriptor.Descriptor( + name='FieldDescriptorProto', + full_name='google.protobuf.FieldDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='number', 
full_name='google.protobuf.FieldDescriptorProto.number', index=1, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDDESCRIPTORPROTO_TYPE, + _FIELDDESCRIPTORPROTO_LABEL, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1232, + serialized_end=1932, +) + + +_ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( + name='OneofDescriptorProto', + full_name='google.protobuf.OneofDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1934, + serialized_end=1970, +) + 
+ +_ENUMDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumDescriptorProto', + full_name='google.protobuf.EnumDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1973, + serialized_end=2113, +) + + +_ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( + name='EnumValueDescriptorProto', + full_name='google.protobuf.EnumValueDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='number', 
full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2115, + serialized_end=2223, +) + + +_SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( + name='ServiceDescriptorProto', + full_name='google.protobuf.ServiceDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2226, + serialized_end=2370, +) + + +_METHODDESCRIPTORPROTO = _descriptor.Descriptor( + name='MethodDescriptorProto', + full_name='google.protobuf.MethodDescriptorProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='client_streaming', full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, + number=5, type=8, cpp_type=7, 
label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2373, + serialized_end=2566, +) + + +_FILEOPTIONS = _descriptor.Descriptor( + name='FileOptions', + full_name='google.protobuf.FileOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, + number=20, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, + number=27, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, + number=9, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, + number=16, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, + number=17, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, + number=18, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=10, + number=23, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=11, + number=31, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=12, + number=36, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=13, + number=37, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='javanano_use_deprecated_package', full_name='google.protobuf.FileOptions.javanano_use_deprecated_package', index=14, + number=38, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=15, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FILEOPTIONS_OPTIMIZEMODE, + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=2569, + serialized_end=3251, +) + + +_MESSAGEOPTIONS = _descriptor.Descriptor( + name='MessageOptions', + full_name='google.protobuf.MessageOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=3254, + serialized_end=3484, +) + + +_FIELDOPTIONS = _descriptor.Descriptor( + name='FieldOptions', + full_name='google.protobuf.FieldOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, + number=6, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=4, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='weak', full_name='google.protobuf.FieldOptions.weak', index=5, + number=10, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=6, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _FIELDOPTIONS_CTYPE, + _FIELDOPTIONS_JSTYPE, + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=3487, + serialized_end=3895, +) + + +_ENUMOPTIONS = _descriptor.Descriptor( + name='EnumOptions', + full_name='google.protobuf.EnumOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=3898, + serialized_end=4039, +) + + +_ENUMVALUEOPTIONS = _descriptor.Descriptor( + name='EnumValueOptions', + full_name='google.protobuf.EnumValueOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 
536870912), ], + oneofs=[ + ], + serialized_start=4041, + serialized_end=4166, +) + + +_SERVICEOPTIONS = _descriptor.Descriptor( + name='ServiceOptions', + full_name='google.protobuf.ServiceOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=4168, + serialized_end=4291, +) + + +_METHODOPTIONS = _descriptor.Descriptor( + name='MethodOptions', + full_name='google.protobuf.MethodOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, + number=33, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=1, + number=999, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=4293, + serialized_end=4415, +) + + +_UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( + name='NamePart', + full_name='google.protobuf.UninterpretedOption.NamePart', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, + number=2, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4653, + serialized_end=4704, +) + +_UNINTERPRETEDOPTION = _descriptor.Descriptor( + name='UninterpretedOption', + full_name='google.protobuf.UninterpretedOption', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='identifier_value', 
full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, + number=5, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, + number=6, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + ], + extensions=[ + ], + nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4418, + serialized_end=4704, +) + + +_SOURCECODEINFO_LOCATION = _descriptor.Descriptor( + name='Location', + full_name='google.protobuf.SourceCodeInfo.Location', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, + number=6, type=9, 
cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4786, + serialized_end=4920, +) + +_SOURCECODEINFO = _descriptor.Descriptor( + name='SourceCodeInfo', + full_name='google.protobuf.SourceCodeInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_SOURCECODEINFO_LOCATION, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4707, + serialized_end=4920, +) + +_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO +_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO +_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO +_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO +_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO +_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS +_FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO +_DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO +_DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO +_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO 
+_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO +_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO +_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO +_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE +_DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO +_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS +_DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE +_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL +_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE +_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS +_FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO +_FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO +_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO +_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS +_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS +_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO +_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS +_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS +_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE +_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION +_FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS +_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION +_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE +_FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE 
+_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION +_FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS +_FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS +_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION +_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION +_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION +_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION +_UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION +_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART +_SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO +_SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION +DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET +DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO +DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO +DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO +DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO +DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO +DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO +DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO +DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO +DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS +DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS +DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS +DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS +DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS +DESCRIPTOR.message_types_by_name['ServiceOptions'] = 
_SERVICEOPTIONS +DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS +DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION +DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO + +FileDescriptorSet = _reflection.GeneratedProtocolMessageType('FileDescriptorSet', (_message.Message,), dict( + DESCRIPTOR = _FILEDESCRIPTORSET, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet) + )) +_sym_db.RegisterMessage(FileDescriptorSet) + +FileDescriptorProto = _reflection.GeneratedProtocolMessageType('FileDescriptorProto', (_message.Message,), dict( + DESCRIPTOR = _FILEDESCRIPTORPROTO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto) + )) +_sym_db.RegisterMessage(FileDescriptorProto) + +DescriptorProto = _reflection.GeneratedProtocolMessageType('DescriptorProto', (_message.Message,), dict( + + ExtensionRange = _reflection.GeneratedProtocolMessageType('ExtensionRange', (_message.Message,), dict( + DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange) + )) + , + + ReservedRange = _reflection.GeneratedProtocolMessageType('ReservedRange', (_message.Message,), dict( + DESCRIPTOR = _DESCRIPTORPROTO_RESERVEDRANGE, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ReservedRange) + )) + , + DESCRIPTOR = _DESCRIPTORPROTO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto) + )) +_sym_db.RegisterMessage(DescriptorProto) +_sym_db.RegisterMessage(DescriptorProto.ExtensionRange) +_sym_db.RegisterMessage(DescriptorProto.ReservedRange) + +FieldDescriptorProto = _reflection.GeneratedProtocolMessageType('FieldDescriptorProto', 
(_message.Message,), dict( + DESCRIPTOR = _FIELDDESCRIPTORPROTO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.FieldDescriptorProto) + )) +_sym_db.RegisterMessage(FieldDescriptorProto) + +OneofDescriptorProto = _reflection.GeneratedProtocolMessageType('OneofDescriptorProto', (_message.Message,), dict( + DESCRIPTOR = _ONEOFDESCRIPTORPROTO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.OneofDescriptorProto) + )) +_sym_db.RegisterMessage(OneofDescriptorProto) + +EnumDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumDescriptorProto', (_message.Message,), dict( + DESCRIPTOR = _ENUMDESCRIPTORPROTO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto) + )) +_sym_db.RegisterMessage(EnumDescriptorProto) + +EnumValueDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumValueDescriptorProto', (_message.Message,), dict( + DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueDescriptorProto) + )) +_sym_db.RegisterMessage(EnumValueDescriptorProto) + +ServiceDescriptorProto = _reflection.GeneratedProtocolMessageType('ServiceDescriptorProto', (_message.Message,), dict( + DESCRIPTOR = _SERVICEDESCRIPTORPROTO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.ServiceDescriptorProto) + )) +_sym_db.RegisterMessage(ServiceDescriptorProto) + +MethodDescriptorProto = _reflection.GeneratedProtocolMessageType('MethodDescriptorProto', (_message.Message,), dict( + DESCRIPTOR = _METHODDESCRIPTORPROTO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.MethodDescriptorProto) + )) +_sym_db.RegisterMessage(MethodDescriptorProto) + +FileOptions = 
_reflection.GeneratedProtocolMessageType('FileOptions', (_message.Message,), dict( + DESCRIPTOR = _FILEOPTIONS, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.FileOptions) + )) +_sym_db.RegisterMessage(FileOptions) + +MessageOptions = _reflection.GeneratedProtocolMessageType('MessageOptions', (_message.Message,), dict( + DESCRIPTOR = _MESSAGEOPTIONS, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.MessageOptions) + )) +_sym_db.RegisterMessage(MessageOptions) + +FieldOptions = _reflection.GeneratedProtocolMessageType('FieldOptions', (_message.Message,), dict( + DESCRIPTOR = _FIELDOPTIONS, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.FieldOptions) + )) +_sym_db.RegisterMessage(FieldOptions) + +EnumOptions = _reflection.GeneratedProtocolMessageType('EnumOptions', (_message.Message,), dict( + DESCRIPTOR = _ENUMOPTIONS, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.EnumOptions) + )) +_sym_db.RegisterMessage(EnumOptions) + +EnumValueOptions = _reflection.GeneratedProtocolMessageType('EnumValueOptions', (_message.Message,), dict( + DESCRIPTOR = _ENUMVALUEOPTIONS, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueOptions) + )) +_sym_db.RegisterMessage(EnumValueOptions) + +ServiceOptions = _reflection.GeneratedProtocolMessageType('ServiceOptions', (_message.Message,), dict( + DESCRIPTOR = _SERVICEOPTIONS, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.ServiceOptions) + )) +_sym_db.RegisterMessage(ServiceOptions) + +MethodOptions = _reflection.GeneratedProtocolMessageType('MethodOptions', (_message.Message,), dict( + DESCRIPTOR = _METHODOPTIONS, + __module__ = 'google.protobuf.descriptor_pb2' + # 
@@protoc_insertion_point(class_scope:google.protobuf.MethodOptions) + )) +_sym_db.RegisterMessage(MethodOptions) + +UninterpretedOption = _reflection.GeneratedProtocolMessageType('UninterpretedOption', (_message.Message,), dict( + + NamePart = _reflection.GeneratedProtocolMessageType('NamePart', (_message.Message,), dict( + DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption.NamePart) + )) + , + DESCRIPTOR = _UNINTERPRETEDOPTION, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption) + )) +_sym_db.RegisterMessage(UninterpretedOption) +_sym_db.RegisterMessage(UninterpretedOption.NamePart) + +SourceCodeInfo = _reflection.GeneratedProtocolMessageType('SourceCodeInfo', (_message.Message,), dict( + + Location = _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), dict( + DESCRIPTOR = _SOURCECODEINFO_LOCATION, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo.Location) + )) + , + DESCRIPTOR = _SOURCECODEINFO, + __module__ = 'google.protobuf.descriptor_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo) + )) +_sym_db.RegisterMessage(SourceCodeInfo) +_sym_db.RegisterMessage(SourceCodeInfo.Location) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/descriptor_pool.py b/deps/google/protobuf/descriptor_pool.py new file mode 100644 index 00000000..3e80795c --- /dev/null +++ b/deps/google/protobuf/descriptor_pool.py @@ -0,0 +1,749 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides DescriptorPool to use as a container for proto2 descriptors. + +The DescriptorPool is used in conjection with a DescriptorDatabase to maintain +a collection of protocol buffer descriptors for use when dynamically creating +message types at runtime. + +For most applications protocol buffers should be used via modules generated by +the protocol buffer compiler tool. 
This should only be used when the type of +protocol buffers used in an application or library cannot be predetermined. + +Below is a straightforward example on how to use this class: + + pool = DescriptorPool() + file_descriptor_protos = [ ... ] + for file_descriptor_proto in file_descriptor_protos: + pool.Add(file_descriptor_proto) + my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') + +The message descriptor can be used in conjunction with the message_factory +module in order to create a protocol buffer class that can be encoded and +decoded. + +If you want to get a Python class for the specified proto, use the +helper functions inside google.protobuf.message_factory +directly instead of this class. +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +from google.protobuf import descriptor +from google.protobuf import descriptor_database +from google.protobuf import text_encoding + + +_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS + + +def _NormalizeFullyQualifiedName(name): + """Remove leading period from fully-qualified type name. + + Due to b/13860351 in descriptor_database.py, types in the root namespace are + generated with a leading period. This function removes that prefix. + + Args: + name: A str, the fully-qualified symbol name. + + Returns: + A str, the normalized fully-qualified symbol name. + """ + return name.lstrip('.') + + +class DescriptorPool(object): + """A collection of protobufs dynamically constructed by descriptor protos.""" + + if _USE_C_DESCRIPTORS: + + def __new__(cls, descriptor_db=None): + # pylint: disable=protected-access + return descriptor._message.DescriptorPool(descriptor_db) + + def __init__(self, descriptor_db=None): + """Initializes a Pool of proto buffs. + + The descriptor_db argument to the constructor is provided to allow + specialized file descriptor proto lookup code to be triggered on demand. 
An + example would be an implementation which will read and compile a file + specified in a call to FindFileByName() and not require the call to Add() + at all. Results from this database will be cached internally here as well. + + Args: + descriptor_db: A secondary source of file descriptors. + """ + + self._internal_db = descriptor_database.DescriptorDatabase() + self._descriptor_db = descriptor_db + self._descriptors = {} + self._enum_descriptors = {} + self._file_descriptors = {} + + def Add(self, file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + file_desc_proto: The FileDescriptorProto to add. + """ + + self._internal_db.Add(file_desc_proto) + + def AddSerializedFile(self, serialized_file_desc_proto): + """Adds the FileDescriptorProto and its types to this pool. + + Args: + serialized_file_desc_proto: A bytes string, serialization of the + FileDescriptorProto to add. + """ + + # pylint: disable=g-import-not-at-top + from google.protobuf import descriptor_pb2 + file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( + serialized_file_desc_proto) + self.Add(file_desc_proto) + + def AddDescriptor(self, desc): + """Adds a Descriptor to the pool, non-recursively. + + If the Descriptor contains nested messages or enums, the caller must + explicitly register them. This method also registers the FileDescriptor + associated with the message. + + Args: + desc: A Descriptor. + """ + if not isinstance(desc, descriptor.Descriptor): + raise TypeError('Expected instance of descriptor.Descriptor.') + + self._descriptors[desc.full_name] = desc + self.AddFileDescriptor(desc.file) + + def AddEnumDescriptor(self, enum_desc): + """Adds an EnumDescriptor to the pool. + + This method also registers the FileDescriptor associated with the message. + + Args: + enum_desc: An EnumDescriptor. 
+ """ + + if not isinstance(enum_desc, descriptor.EnumDescriptor): + raise TypeError('Expected instance of descriptor.EnumDescriptor.') + + self._enum_descriptors[enum_desc.full_name] = enum_desc + self.AddFileDescriptor(enum_desc.file) + + def AddFileDescriptor(self, file_desc): + """Adds a FileDescriptor to the pool, non-recursively. + + If the FileDescriptor contains messages or enums, the caller must explicitly + register them. + + Args: + file_desc: A FileDescriptor. + """ + + if not isinstance(file_desc, descriptor.FileDescriptor): + raise TypeError('Expected instance of descriptor.FileDescriptor.') + self._file_descriptors[file_desc.name] = file_desc + + def FindFileByName(self, file_name): + """Gets a FileDescriptor by file name. + + Args: + file_name: The path to the file to get a descriptor for. + + Returns: + A FileDescriptor for the named file. + + Raises: + KeyError: if the file can not be found in the pool. + """ + + try: + return self._file_descriptors[file_name] + except KeyError: + pass + + try: + file_proto = self._internal_db.FindFileByName(file_name) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileByName(file_name) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file named %s' % file_name) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def FindFileContainingSymbol(self, symbol): + """Gets the FileDescriptor for the file containing the specified symbol. + + Args: + symbol: The name of the symbol to search for. + + Returns: + A FileDescriptor that contains the specified symbol. + + Raises: + KeyError: if the file can not be found in the pool. 
+ """ + + symbol = _NormalizeFullyQualifiedName(symbol) + try: + return self._descriptors[symbol].file + except KeyError: + pass + + try: + return self._enum_descriptors[symbol].file + except KeyError: + pass + + try: + file_proto = self._internal_db.FindFileContainingSymbol(symbol) + except KeyError as error: + if self._descriptor_db: + file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) + else: + raise error + if not file_proto: + raise KeyError('Cannot find a file containing %s' % symbol) + return self._ConvertFileProtoToFileDescriptor(file_proto) + + def FindMessageTypeByName(self, full_name): + """Loads the named descriptor from the pool. + + Args: + full_name: The full name of the descriptor to load. + + Returns: + The descriptor for the named type. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._descriptors: + self.FindFileContainingSymbol(full_name) + return self._descriptors[full_name] + + def FindEnumTypeByName(self, full_name): + """Loads the named enum descriptor from the pool. + + Args: + full_name: The full name of the enum descriptor to load. + + Returns: + The enum descriptor for the named type. + """ + + full_name = _NormalizeFullyQualifiedName(full_name) + if full_name not in self._enum_descriptors: + self.FindFileContainingSymbol(full_name) + return self._enum_descriptors[full_name] + + def FindFieldByName(self, full_name): + """Loads the named field descriptor from the pool. + + Args: + full_name: The full name of the field descriptor to load. + + Returns: + The field descriptor for the named field. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, field_name = full_name.rpartition('.') + message_descriptor = self.FindMessageTypeByName(message_name) + return message_descriptor.fields_by_name[field_name] + + def FindExtensionByName(self, full_name): + """Loads the named extension descriptor from the pool. 
+ + Args: + full_name: The full name of the extension descriptor to load. + + Returns: + A FieldDescriptor, describing the named extension. + """ + full_name = _NormalizeFullyQualifiedName(full_name) + message_name, _, extension_name = full_name.rpartition('.') + try: + # Most extensions are nested inside a message. + scope = self.FindMessageTypeByName(message_name) + except KeyError: + # Some extensions are defined at file scope. + scope = self.FindFileContainingSymbol(full_name) + return scope.extensions_by_name[extension_name] + + def _ConvertFileProtoToFileDescriptor(self, file_proto): + """Creates a FileDescriptor from a proto or returns a cached copy. + + This method also has the side effect of loading all the symbols found in + the file into the appropriate dictionaries in the pool. + + Args: + file_proto: The proto to convert. + + Returns: + A FileDescriptor matching the passed in proto. + """ + + if file_proto.name not in self._file_descriptors: + built_deps = list(self._GetDeps(file_proto.dependency)) + direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] + + file_descriptor = descriptor.FileDescriptor( + pool=self, + name=file_proto.name, + package=file_proto.package, + syntax=file_proto.syntax, + options=file_proto.options, + serialized_pb=file_proto.SerializeToString(), + dependencies=direct_deps) + if _USE_C_DESCRIPTORS: + # When using C++ descriptors, all objects defined in the file were added + # to the C++ database when the FileDescriptor was built above. + # Just add them to this descriptor pool. 
+ def _AddMessageDescriptor(message_desc): + self._descriptors[message_desc.full_name] = message_desc + for nested in message_desc.nested_types: + _AddMessageDescriptor(nested) + for enum_type in message_desc.enum_types: + _AddEnumDescriptor(enum_type) + def _AddEnumDescriptor(enum_desc): + self._enum_descriptors[enum_desc.full_name] = enum_desc + for message_type in file_descriptor.message_types_by_name.values(): + _AddMessageDescriptor(message_type) + for enum_type in file_descriptor.enum_types_by_name.values(): + _AddEnumDescriptor(enum_type) + else: + scope = {} + + # This loop extracts all the message and enum types from all the + # dependencies of the file_proto. This is necessary to create the + # scope of available message types when defining the passed in + # file proto. + for dependency in built_deps: + scope.update(self._ExtractSymbols( + dependency.message_types_by_name.values())) + scope.update((_PrefixWithDot(enum.full_name), enum) + for enum in dependency.enum_types_by_name.values()) + + for message_type in file_proto.message_type: + message_desc = self._ConvertMessageDescriptor( + message_type, file_proto.package, file_descriptor, scope, + file_proto.syntax) + file_descriptor.message_types_by_name[message_desc.name] = ( + message_desc) + + for enum_type in file_proto.enum_type: + file_descriptor.enum_types_by_name[enum_type.name] = ( + self._ConvertEnumDescriptor(enum_type, file_proto.package, + file_descriptor, None, scope)) + + for index, extension_proto in enumerate(file_proto.extension): + extension_desc = self._MakeFieldDescriptor( + extension_proto, file_proto.package, index, is_extension=True) + extension_desc.containing_type = self._GetTypeFromScope( + file_descriptor.package, extension_proto.extendee, scope) + self._SetFieldType(extension_proto, extension_desc, + file_descriptor.package, scope) + file_descriptor.extensions_by_name[extension_desc.name] = ( + extension_desc) + + for desc_proto in file_proto.message_type: + 
        # NOTE(review): this is the tail of the file-conversion method whose
        # definition starts above this excerpt; the call below is the body of
        # its `for desc_proto in file_proto.message_type:` loop (second pass:
        # resolve field types now that every message/enum is in `scope`).
        self._SetAllFieldTypes(file_proto.package, desc_proto, scope)

        if file_proto.package:
          desc_proto_prefix = _PrefixWithDot(file_proto.package)
        else:
          desc_proto_prefix = ''

        # Re-fetch each top-level message from `scope` so that the entries in
        # message_types_by_name are the fully linked descriptors.
        for desc_proto in file_proto.message_type:
          desc = self._GetTypeFromScope(
              desc_proto_prefix, desc_proto.name, scope)
          file_descriptor.message_types_by_name[desc_proto.name] = desc

      self.Add(file_proto)
      self._file_descriptors[file_proto.name] = file_descriptor

    return self._file_descriptors[file_proto.name]

  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
                                scope=None, syntax=None):
    """Adds the proto to the pool in the specified package.

    Args:
      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
      package: The package the proto should be located in.
      file_desc: The file containing this message.
      scope: Dict mapping short and full symbols to message and enum types.
      syntax: The syntax string of the containing file (e.g. 'proto3'),
        forwarded to the created Descriptor.

    Returns:
      The added descriptor.
    """

    if package:
      desc_name = '.'.join((package, desc_proto.name))
    else:
      desc_name = desc_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    if scope is None:
      scope = {}

    # Convert children first so they are registered in `scope` (and linkable)
    # before the containing Descriptor below is assembled.
    nested = [
        self._ConvertMessageDescriptor(
            nested, desc_name, file_desc, scope, syntax)
        for nested in desc_proto.nested_type]
    enums = [
        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
        for enum in desc_proto.enum_type]
    fields = [self._MakeFieldDescriptor(field, desc_name, index)
              for index, field in enumerate(desc_proto.field)]
    extensions = [
        self._MakeFieldDescriptor(extension, desc_name, index,
                                  is_extension=True)
        for index, extension in enumerate(desc_proto.extension)]
    # Oneofs start with an empty field list; fields are wired in below.
    oneofs = [
        descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)),
                                   index, None, [])
        for index, desc in enumerate(desc_proto.oneof_decl)]
    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
    if extension_ranges:
      is_extendable = True
    else:
      is_extendable = False
    desc = descriptor.Descriptor(
        name=desc_proto.name,
        full_name=desc_name,
        filename=file_name,
        containing_type=None,
        fields=fields,
        oneofs=oneofs,
        nested_types=nested,
        enum_types=enums,
        extensions=extensions,
        options=desc_proto.options,
        is_extendable=is_extendable,
        extension_ranges=extension_ranges,
        file=file_desc,
        serialized_start=None,
        serialized_end=None,
        syntax=syntax)
    # Back-link children to their new containing type.
    for nested in desc.nested_types:
      nested.containing_type = desc
    for enum in desc.enum_types:
      enum.containing_type = desc
    # Wire each oneof-member field into its oneof, in both directions.
    for field_index, field_desc in enumerate(desc_proto.field):
      if field_desc.HasField('oneof_index'):
        oneof_index = field_desc.oneof_index
        oneofs[oneof_index].fields.append(fields[field_index])
        fields[field_index].containing_oneof = oneofs[oneof_index]

    # Register under the leading-dot fully-qualified name; note this mutates
    # the caller's `scope` dict.
    scope[_PrefixWithDot(desc_name)] = desc
    self._descriptors[desc_name] = desc
    return desc

  def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
                             containing_type=None, scope=None):
    """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.

    Args:
      enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
      package: Optional package name for the new message EnumDescriptor.
      file_desc: The file containing the enum descriptor.
      containing_type: The type containing this enum.
      scope: Scope containing available types.

    Returns:
      The added descriptor
    """

    if package:
      enum_name = '.'.join((package, enum_proto.name))
    else:
      enum_name = enum_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    values = [self._MakeEnumValueDescriptor(value, index)
              for index, value in enumerate(enum_proto.value)]
    desc = descriptor.EnumDescriptor(name=enum_proto.name,
                                     full_name=enum_name,
                                     filename=file_name,
                                     file=file_desc,
                                     values=values,
                                     containing_type=containing_type,
                                     options=enum_proto.options)
    # Register under the leading-dot full name; mutates the caller's scope.
    scope['.%s' % enum_name] = desc
    self._enum_descriptors[enum_name] = desc
    return desc

  def _MakeFieldDescriptor(self, field_proto, message_name, index,
                           is_extension=False):
    """Creates a field descriptor from a FieldDescriptorProto.

    For message and enum type fields, this method will do a look up
    in the pool for the appropriate descriptor for that type. If it
    is unavailable, it will fall back to the _source function to
    create it. If this type is still unavailable, construction will
    fail.

    Args:
      field_proto: The proto describing the field.
      message_name: The name of the containing message.
      index: Index of the field
      is_extension: Indication that this field is for an extension.

    Returns:
      An initialized FieldDescriptor object
    """

    if message_name:
      full_name = '.'.join((message_name, field_proto.name))
    else:
      full_name = field_proto.name

    # cpp_type, message_type, enum_type, containing_type and default_value
    # are deliberately left unset here; they are filled in later by
    # _SetFieldType / _SetAllFieldTypes once all types are known.
    return descriptor.FieldDescriptor(
        name=field_proto.name,
        full_name=full_name,
        index=index,
        number=field_proto.number,
        type=field_proto.type,
        cpp_type=None,
        message_type=None,
        enum_type=None,
        containing_type=None,
        label=field_proto.label,
        has_default_value=False,
        default_value=None,
        is_extension=is_extension,
        extension_scope=None,
        options=field_proto.options)

  def _SetAllFieldTypes(self, package, desc_proto, scope):
    """Sets all the descriptor's fields's types.

    This method also sets the containing types on any extensions.

    Args:
      package: The current package of desc_proto.
      desc_proto: The message descriptor to update.
      scope: Enclosing scope of available types.
    """

    package = _PrefixWithDot(package)

    main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)

    if package == '.':
      nested_package = _PrefixWithDot(desc_proto.name)
    else:
      nested_package = '.'.join([package, desc_proto.name])

    # zip pairs each FieldDescriptorProto with the FieldDescriptor that was
    # built from it (same construction order).
    for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
      self._SetFieldType(field_proto, field_desc, nested_package, scope)

    for extension_proto, extension_desc in (
        zip(desc_proto.extension, main_desc.extensions)):
      # Extensions additionally need their containing (extended) type linked.
      extension_desc.containing_type = self._GetTypeFromScope(
          nested_package, extension_proto.extendee, scope)
      self._SetFieldType(extension_proto, extension_desc, nested_package, scope)

    # Recurse into nested message types.
    for nested_type in desc_proto.nested_type:
      self._SetAllFieldTypes(nested_package, nested_type, scope)

  def _SetFieldType(self, field_proto, field_desc, package, scope):
    """Sets the field's type, cpp_type, message_type and enum_type.

    Note: mutates both field_proto (may fill in a missing 'type') and
    field_desc (type, cpp_type, message_type/enum_type, default_value).

    Args:
      field_proto: Data about the field in proto format.
      field_desc: The descriptor to modify.
      package: The package the field's container is in.
      scope: Enclosing scope of available types.
    """
    if field_proto.type_name:
      desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
    else:
      desc = None

    # If the wire type was omitted, infer it from the resolved type_name:
    # a message Descriptor means TYPE_MESSAGE, otherwise assume TYPE_ENUM.
    if not field_proto.HasField('type'):
      if isinstance(desc, descriptor.Descriptor):
        field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
      else:
        field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM

    field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
        field_proto.type)

    if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
        or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
      field_desc.message_type = desc

    if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
      field_desc.enum_type = desc

    if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
      # Repeated fields never carry an explicit default.
      field_desc.has_default_value = False
      field_desc.default_value = []
    elif field_proto.HasField('default_value'):
      # Explicit default: parse the textual default_value per field type.
      field_desc.has_default_value = True
      if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
          field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
        field_desc.default_value = float(field_proto.default_value)
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
        field_desc.default_value = field_proto.default_value
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
        field_desc.default_value = field_proto.default_value.lower() == 'true'
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
        # Default is given as the enum value's name; map it to its number.
        field_desc.default_value = field_desc.enum_type.values_by_name[
            field_proto.default_value].number
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
        # Bytes defaults arrive C-escaped; unescape to raw bytes.
        field_desc.default_value = text_encoding.CUnescape(
            field_proto.default_value)
      else:
        # All other types are of the "int" type.
        field_desc.default_value = int(field_proto.default_value)
    else:
      # No explicit default: use the zero value appropriate to the type.
      field_desc.has_default_value = False
      if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
          field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
        field_desc.default_value = 0.0
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
        field_desc.default_value = u''
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
        field_desc.default_value = False
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
        # First declared enum value is the implicit default.
        field_desc.default_value = field_desc.enum_type.values[0].number
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
        field_desc.default_value = b''
      else:
        # All other types are of the "int" type.
        field_desc.default_value = 0

    field_desc.type = field_proto.type

  def _MakeEnumValueDescriptor(self, value_proto, index):
    """Creates an enum value descriptor object from an enum value proto.

    Args:
      value_proto: The proto describing the enum value.
      index: The index of the enum value.

    Returns:
      An initialized EnumValueDescriptor object.
    """

    # type=None: the containing EnumDescriptor links itself in later.
    return descriptor.EnumValueDescriptor(
        name=value_proto.name,
        index=index,
        number=value_proto.number,
        options=value_proto.options,
        type=None)

  def _ExtractSymbols(self, descriptors):
    """Pulls out all the symbols from descriptor protos.

    Args:
      descriptors: The messages to extract descriptors from.
    Yields:
      A two element tuple of the type name and descriptor object.
    """

    for desc in descriptors:
      yield (_PrefixWithDot(desc.full_name), desc)
      # Recurse into nested message types, then emit this message's enums.
      for symbol in self._ExtractSymbols(desc.nested_types):
        yield symbol
      for enum in desc.enum_types:
        yield (_PrefixWithDot(enum.full_name), enum)

  def _GetDeps(self, dependencies):
    """Recursively finds dependencies for file protos.

    Args:
      dependencies: The names of the files being depended on.

    Yields:
      Each direct and indirect dependency.
    """

    for dependency in dependencies:
      dep_desc = self.FindFileByName(dependency)
      yield dep_desc
      # NOTE(review): only one level of expansion here — assumes
      # dep_desc.dependencies is already transitively complete; confirm
      # against FileDescriptor construction.
      for parent_dep in dep_desc.dependencies:
        yield parent_dep

  def _GetTypeFromScope(self, package, type_name, scope):
    """Finds a given type name in the current scope.

    Args:
      package: The package the proto should be located in.
      type_name: The name of the type to be found in the scope.
      scope: Dict mapping short and full symbols to message and enum types.

    Returns:
      The descriptor for the requested type.

    Raises:
      KeyError: If the name cannot be resolved at any enclosing level.
    """
    if type_name not in scope:
      # Protobuf-style relative name resolution: try the innermost package
      # first, then pop one package component at a time until a match.
      components = _PrefixWithDot(package).split('.')
      while components:
        possible_match = '.'.join(components + [type_name])
        if possible_match in scope:
          type_name = possible_match
          break
        else:
          components.pop(-1)
    return scope[type_name]


def _PrefixWithDot(name):
  # Normalize a symbol name to its fully-qualified, leading-dot form.
  return name if name.startswith('.') else '.%s' % name


if _USE_C_DESCRIPTORS:
  # TODO(amauryfa): This pool could be constructed from Python code, when we
  # support a flag like 'use_cpp_generated_pool=True'.
  # pylint: disable=protected-access
  _DEFAULT = descriptor._message.default_pool
else:
  _DEFAULT = DescriptorPool()


def Default():
  """Returns the process-wide default descriptor pool."""
  return _DEFAULT
diff --git a/deps/google/protobuf/duration_pb2.py b/deps/google/protobuf/duration_pb2.py
new file mode 100644
index 00000000..e25cd474
--- /dev/null
+++ b/deps/google/protobuf/duration_pb2.py
@@ -0,0 +1,78 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/duration.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/duration.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42P\n\x13\x63om.google.protobufB\rDurationProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_DURATION = _descriptor.Descriptor( + name='Duration', + full_name='google.protobuf.Duration', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='seconds', full_name='google.protobuf.Duration.seconds', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nanos', full_name='google.protobuf.Duration.nanos', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=51, + serialized_end=93, +) + 
+DESCRIPTOR.message_types_by_name['Duration'] = _DURATION + +Duration = _reflection.GeneratedProtocolMessageType('Duration', (_message.Message,), dict( + DESCRIPTOR = _DURATION, + __module__ = 'google.protobuf.duration_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Duration) + )) +_sym_db.RegisterMessage(Duration) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\rDurationProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/empty_pb2.py b/deps/google/protobuf/empty_pb2.py new file mode 100644 index 00000000..db4c1e9a --- /dev/null +++ b/deps/google/protobuf/empty_pb2.py @@ -0,0 +1,64 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/empty.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/empty.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyBP\n\x13\x63om.google.protobufB\nEmptyProtoP\x01\xa0\x01\x01\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_EMPTY = _descriptor.Descriptor( + name='Empty', + full_name='google.protobuf.Empty', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=48, + serialized_end=55, +) + +DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY + +Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), dict( + DESCRIPTOR = _EMPTY, + __module__ = 'google.protobuf.empty_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Empty) + )) +_sym_db.RegisterMessage(Empty) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\nEmptyProtoP\001\240\001\001\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/field_mask_pb2.py b/deps/google/protobuf/field_mask_pb2.py new file mode 100644 index 00000000..bfda7fc0 --- /dev/null +++ b/deps/google/protobuf/field_mask_pb2.py @@ -0,0 +1,71 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/field_mask.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/field_mask.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tBQ\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_FIELDMASK = _descriptor.Descriptor( + name='FieldMask', + full_name='google.protobuf.FieldMask', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='paths', full_name='google.protobuf.FieldMask.paths', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=53, + serialized_end=79, +) + +DESCRIPTOR.message_types_by_name['FieldMask'] = _FIELDMASK + +FieldMask = _reflection.GeneratedProtocolMessageType('FieldMask', (_message.Message,), dict( + DESCRIPTOR = _FIELDMASK, + __module__ = 'google.protobuf.field_mask_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.FieldMask) + )) +_sym_db.RegisterMessage(FieldMask) + + +DESCRIPTOR.has_options 
= True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\016FieldMaskProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/__init__.py b/deps/google/protobuf/internal/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/deps/google/protobuf/internal/__pycache__/__init__.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..236d495b Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/_parameterized.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/_parameterized.cpython-34.pyc new file mode 100644 index 00000000..4f9716df Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/_parameterized.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/any_test_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/any_test_pb2.cpython-34.pyc new file mode 100644 index 00000000..e26e710c Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/any_test_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/api_implementation.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/api_implementation.cpython-34.pyc new file mode 100644 index 00000000..ae79b309 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/api_implementation.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/containers.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/containers.cpython-34.pyc new file mode 100644 index 00000000..ea6bc458 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/containers.cpython-34.pyc differ diff --git 
a/deps/google/protobuf/internal/__pycache__/decoder.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/decoder.cpython-34.pyc new file mode 100644 index 00000000..850a7cf8 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/decoder.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/descriptor_database_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/descriptor_database_test.cpython-34.pyc new file mode 100644 index 00000000..4aefe78d Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/descriptor_database_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/descriptor_pool_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/descriptor_pool_test.cpython-34.pyc new file mode 100644 index 00000000..221fb8d3 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/descriptor_pool_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/descriptor_pool_test1_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/descriptor_pool_test1_pb2.cpython-34.pyc new file mode 100644 index 00000000..6cf3f098 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/descriptor_pool_test1_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/descriptor_pool_test2_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/descriptor_pool_test2_pb2.cpython-34.pyc new file mode 100644 index 00000000..7060a954 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/descriptor_pool_test2_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/descriptor_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/descriptor_test.cpython-34.pyc new file mode 100644 index 00000000..8178a415 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/descriptor_test.cpython-34.pyc differ diff --git 
a/deps/google/protobuf/internal/__pycache__/encoder.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/encoder.cpython-34.pyc new file mode 100644 index 00000000..9cf4ce24 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/encoder.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-34.pyc new file mode 100644 index 00000000..738bd29f Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/factory_test1_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/factory_test1_pb2.cpython-34.pyc new file mode 100644 index 00000000..616d6434 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/factory_test1_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/factory_test2_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/factory_test2_pb2.cpython-34.pyc new file mode 100644 index 00000000..af93dcd1 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/factory_test2_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/generator_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/generator_test.cpython-34.pyc new file mode 100644 index 00000000..3f5175ac Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/generator_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/json_format_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/json_format_test.cpython-34.pyc new file mode 100644 index 00000000..951732a2 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/json_format_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/message_factory_test.cpython-34.pyc 
b/deps/google/protobuf/internal/__pycache__/message_factory_test.cpython-34.pyc new file mode 100644 index 00000000..a767c36d Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/message_factory_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/message_listener.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/message_listener.cpython-34.pyc new file mode 100644 index 00000000..1e517a55 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/message_listener.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/message_set_extensions_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/message_set_extensions_pb2.cpython-34.pyc new file mode 100644 index 00000000..39249a84 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/message_set_extensions_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/message_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/message_test.cpython-34.pyc new file mode 100644 index 00000000..802f2916 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/message_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/missing_enum_values_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/missing_enum_values_pb2.cpython-34.pyc new file mode 100644 index 00000000..918e1df5 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/missing_enum_values_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/more_extensions_dynamic_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/more_extensions_dynamic_pb2.cpython-34.pyc new file mode 100644 index 00000000..3641a514 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/more_extensions_dynamic_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/more_extensions_pb2.cpython-34.pyc 
b/deps/google/protobuf/internal/__pycache__/more_extensions_pb2.cpython-34.pyc new file mode 100644 index 00000000..01b70e0a Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/more_extensions_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/more_messages_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/more_messages_pb2.cpython-34.pyc new file mode 100644 index 00000000..04118ef9 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/more_messages_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/packed_field_test_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/packed_field_test_pb2.cpython-34.pyc new file mode 100644 index 00000000..66e56ff8 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/packed_field_test_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/proto_builder_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/proto_builder_test.cpython-34.pyc new file mode 100644 index 00000000..5fa5e1a4 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/proto_builder_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/python_message.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/python_message.cpython-34.pyc new file mode 100644 index 00000000..e90c8ebf Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/python_message.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/reflection_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/reflection_test.cpython-34.pyc new file mode 100644 index 00000000..58f49077 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/reflection_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/service_reflection_test.cpython-34.pyc 
b/deps/google/protobuf/internal/__pycache__/service_reflection_test.cpython-34.pyc new file mode 100644 index 00000000..661ec57d Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/service_reflection_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/symbol_database_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/symbol_database_test.cpython-34.pyc new file mode 100644 index 00000000..0478bdf5 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/symbol_database_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/test_bad_identifiers_pb2.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/test_bad_identifiers_pb2.cpython-34.pyc new file mode 100644 index 00000000..4548b352 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/test_bad_identifiers_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/test_util.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/test_util.cpython-34.pyc new file mode 100644 index 00000000..15327f91 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/test_util.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/text_encoding_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/text_encoding_test.cpython-34.pyc new file mode 100644 index 00000000..b00dee8a Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/text_encoding_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/text_format_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/text_format_test.cpython-34.pyc new file mode 100644 index 00000000..8daccbfd Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/text_format_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/type_checkers.cpython-34.pyc 
b/deps/google/protobuf/internal/__pycache__/type_checkers.cpython-34.pyc new file mode 100644 index 00000000..c0cfc461 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/type_checkers.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/unknown_fields_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/unknown_fields_test.cpython-34.pyc new file mode 100644 index 00000000..e99f12bb Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/unknown_fields_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/well_known_types.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/well_known_types.cpython-34.pyc new file mode 100644 index 00000000..f6768dc2 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/well_known_types.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/well_known_types_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/well_known_types_test.cpython-34.pyc new file mode 100644 index 00000000..acc1fee7 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/well_known_types_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/wire_format.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/wire_format.cpython-34.pyc new file mode 100644 index 00000000..36bae4a1 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/wire_format.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/__pycache__/wire_format_test.cpython-34.pyc b/deps/google/protobuf/internal/__pycache__/wire_format_test.cpython-34.pyc new file mode 100644 index 00000000..8b0289d3 Binary files /dev/null and b/deps/google/protobuf/internal/__pycache__/wire_format_test.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/_parameterized.py b/deps/google/protobuf/internal/_parameterized.py new file mode 100644 index 00000000..dea3f199 --- 
/dev/null +++ b/deps/google/protobuf/internal/_parameterized.py @@ -0,0 +1,443 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Adds support for parameterized tests to Python's unittest TestCase class. + +A parameterized test is a method in a test case that is invoked with different +argument tuples. 
+ +A simple example: + + class AdditionExample(parameterized.ParameterizedTestCase): + @parameterized.Parameters( + (1, 2, 3), + (4, 5, 9), + (1, 1, 3)) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Each invocation is a separate test case and properly isolated just +like a normal test method, with its own setUp/tearDown cycle. In the +example above, there are three separate testcases, one of which will +fail due to an assertion error (1 + 1 != 3). + +Parameters for invididual test cases can be tuples (with positional parameters) +or dictionaries (with named parameters): + + class AdditionExample(parameterized.ParameterizedTestCase): + @parameterized.Parameters( + {'op1': 1, 'op2': 2, 'result': 3}, + {'op1': 4, 'op2': 5, 'result': 9}, + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + +If a parameterized test fails, the error message will show the +original test name (which is modified internally) and the arguments +for the specific invocation, which are part of the string returned by +the shortDescription() method on test cases. + +The id method of the test, used internally by the unittest framework, +is also modified to show the arguments. To make sure that test names +stay the same across several invocations, object representations like + + >>> class Foo(object): + ... pass + >>> repr(Foo()) + '<__main__.Foo object at 0x23d8610>' + +are turned into '<__main__.Foo>'. For even more descriptive names, +especially in test logs, you can use the NamedParameters decorator. 
In +this case, only tuples are supported, and the first parameters has to +be a string (or an object that returns an apt name when converted via +str()): + + class NamedExample(parameterized.ParameterizedTestCase): + @parameterized.NamedParameters( + ('Normal', 'aa', 'aaa', True), + ('EmptyPrefix', '', 'abc', True), + ('BothEmpty', '', '', True)) + def testStartsWith(self, prefix, string, result): + self.assertEqual(result, strings.startswith(prefix)) + +Named tests also have the benefit that they can be run individually +from the command line: + + $ testmodule.py NamedExample.testStartsWithNormal + . + -------------------------------------------------------------------- + Ran 1 test in 0.000s + + OK + +Parameterized Classes +===================== +If invocation arguments are shared across test methods in a single +ParameterizedTestCase class, instead of decorating all test methods +individually, the class itself can be decorated: + + @parameterized.Parameters( + (1, 2, 3) + (4, 5, 9)) + class ArithmeticTest(parameterized.ParameterizedTestCase): + def testAdd(self, arg1, arg2, result): + self.assertEqual(arg1 + arg2, result) + + def testSubtract(self, arg2, arg2, result): + self.assertEqual(result - arg1, arg2) + +Inputs from Iterables +===================== +If parameters should be shared across several test cases, or are dynamically +created from other sources, a single non-tuple iterable can be passed into +the decorator. 
This iterable will be used to obtain the test cases: + + class AdditionExample(parameterized.ParameterizedTestCase): + @parameterized.Parameters( + c.op1, c.op2, c.result for c in testcases + ) + def testAddition(self, op1, op2, result): + self.assertEqual(result, op1 + op2) + + +Single-Argument Test Methods +============================ +If a test method takes only one argument, the single argument does not need to +be wrapped into a tuple: + + class NegativeNumberExample(parameterized.ParameterizedTestCase): + @parameterized.Parameters( + -1, -3, -4, -5 + ) + def testIsNegative(self, arg): + self.assertTrue(IsNegative(arg)) +""" + +__author__ = 'tmarek@google.com (Torsten Marek)' + +import collections +import functools +import re +import types +try: + import unittest2 as unittest +except ImportError: + import unittest +import uuid + +import six + +ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>') +_SEPARATOR = uuid.uuid1().hex +_FIRST_ARG = object() +_ARGUMENT_REPR = object() + + +def _CleanRepr(obj): + return ADDR_RE.sub(r'<\1>', repr(obj)) + + +# Helper function formerly from the unittest module, removed from it in +# Python 2.7. +def _StrClass(cls): + return '%s.%s' % (cls.__module__, cls.__name__) + + +def _NonStringIterable(obj): + return (isinstance(obj, collections.Iterable) and not + isinstance(obj, six.string_types)) + + +def _FormatParameterList(testcase_params): + if isinstance(testcase_params, collections.Mapping): + return ', '.join('%s=%s' % (argname, _CleanRepr(value)) + for argname, value in testcase_params.items()) + elif _NonStringIterable(testcase_params): + return ', '.join(map(_CleanRepr, testcase_params)) + else: + return _FormatParameterList((testcase_params,)) + + +class _ParameterizedTestIter(object): + """Callable and iterable class for producing new test cases.""" + + def __init__(self, test_method, testcases, naming_type): + """Returns concrete test functions for a test and a list of parameters. 
+ + The naming_type is used to determine the name of the concrete + functions as reported by the unittest framework. If naming_type is + _FIRST_ARG, the testcases must be tuples, and the first element must + have a string representation that is a valid Python identifier. + + Args: + test_method: The decorated test method. + testcases: (list of tuple/dict) A list of parameter + tuples/dicts for individual test invocations. + naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR. + """ + self._test_method = test_method + self.testcases = testcases + self._naming_type = naming_type + + def __call__(self, *args, **kwargs): + raise RuntimeError('You appear to be running a parameterized test case ' + 'without having inherited from parameterized.' + 'ParameterizedTestCase. This is bad because none of ' + 'your test cases are actually being run.') + + def __iter__(self): + test_method = self._test_method + naming_type = self._naming_type + + def MakeBoundParamTest(testcase_params): + @functools.wraps(test_method) + def BoundParamTest(self): + if isinstance(testcase_params, collections.Mapping): + test_method(self, **testcase_params) + elif _NonStringIterable(testcase_params): + test_method(self, *testcase_params) + else: + test_method(self, testcase_params) + + if naming_type is _FIRST_ARG: + # Signal the metaclass that the name of the test function is unique + # and descriptive. + BoundParamTest.__x_use_name__ = True + BoundParamTest.__name__ += str(testcase_params[0]) + testcase_params = testcase_params[1:] + elif naming_type is _ARGUMENT_REPR: + # __x_extra_id__ is used to pass naming information to the __new__ + # method of TestGeneratorMetaclass. + # The metaclass will make sure to create a unique, but nondescriptive + # name for this test. + BoundParamTest.__x_extra_id__ = '(%s)' % ( + _FormatParameterList(testcase_params),) + else: + raise RuntimeError('%s is not a valid naming type.' 
% (naming_type,)) + + BoundParamTest.__doc__ = '%s(%s)' % ( + BoundParamTest.__name__, _FormatParameterList(testcase_params)) + if test_method.__doc__: + BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) + return BoundParamTest + return (MakeBoundParamTest(c) for c in self.testcases) + + +def _IsSingletonList(testcases): + """True iff testcases contains only a single non-tuple element.""" + return len(testcases) == 1 and not isinstance(testcases[0], tuple) + + +def _ModifyClass(class_object, testcases, naming_type): + assert not getattr(class_object, '_id_suffix', None), ( + 'Cannot add parameters to %s,' + ' which already has parameterized methods.' % (class_object,)) + class_object._id_suffix = id_suffix = {} + # We change the size of __dict__ while we iterate over it, + # which Python 3.x will complain about, so use copy(). + for name, obj in class_object.__dict__.copy().items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) + and isinstance(obj, types.FunctionType)): + delattr(class_object, name) + methods = {} + _UpdateClassDictForParamTestCase( + methods, id_suffix, name, + _ParameterizedTestIter(obj, testcases, naming_type)) + for name, meth in methods.items(): + setattr(class_object, name, meth) + + +def _ParameterDecorator(naming_type, testcases): + """Implementation of the parameterization decorators. + + Args: + naming_type: The naming type. + testcases: Testcase parameters. + + Returns: + A function for modifying the decorated object. 
+ """ + def _Apply(obj): + if isinstance(obj, type): + _ModifyClass( + obj, + list(testcases) if not isinstance(testcases, collections.Sequence) + else testcases, + naming_type) + return obj + else: + return _ParameterizedTestIter(obj, testcases, naming_type) + + if _IsSingletonList(testcases): + assert _NonStringIterable(testcases[0]), ( + 'Single parameter argument must be a non-string iterable') + testcases = testcases[0] + + return _Apply + + +def Parameters(*testcases): + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples/dicts/objects (for tests + with only one argument). + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_ARGUMENT_REPR, testcases) + + +def NamedParameters(*testcases): + """A decorator for creating parameterized tests. + + See the module docstring for a usage example. The first element of + each parameter tuple should be a string and will be appended to the + name of the test method. + + Args: + *testcases: Parameters for the decorated method, either a single + iterable, or a list of tuples. + + Returns: + A test generator to be handled by TestGeneratorMetaclass. + """ + return _ParameterDecorator(_FIRST_ARG, testcases) + + +class TestGeneratorMetaclass(type): + """Metaclass for test cases with test generators. + + A test generator is an iterable in a testcase that produces callables. These + callables must be single-argument methods. These methods are injected into + the class namespace and the original iterable is removed. If the name of the + iterable conforms to the test pattern, the injected methods will be picked + up as tests by the unittest framework. + + In general, it is supposed to be used in conjuction with the + Parameters decorator. 
+ """ + + def __new__(mcs, class_name, bases, dct): + dct['_id_suffix'] = id_suffix = {} + for name, obj in dct.items(): + if (name.startswith(unittest.TestLoader.testMethodPrefix) and + _NonStringIterable(obj)): + iterator = iter(obj) + dct.pop(name) + _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) + + return type.__new__(mcs, class_name, bases, dct) + + +def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): + """Adds individual test cases to a dictionary. + + Args: + dct: The target dictionary. + id_suffix: The dictionary for mapping names to test IDs. + name: The original name of the test case. + iterator: The iterator generating the individual test cases. + """ + for idx, func in enumerate(iterator): + assert callable(func), 'Test generators must yield callables, got %r' % ( + func,) + if getattr(func, '__x_use_name__', False): + new_name = func.__name__ + else: + new_name = '%s%s%d' % (name, _SEPARATOR, idx) + assert new_name not in dct, ( + 'Name of parameterized test case "%s" not unique' % (new_name,)) + dct[new_name] = func + id_suffix[new_name] = getattr(func, '__x_extra_id__', '') + + +class ParameterizedTestCase(unittest.TestCase): + """Base class for test cases using the Parameters decorator.""" + __metaclass__ = TestGeneratorMetaclass + + def _OriginalName(self): + return self._testMethodName.split(_SEPARATOR)[0] + + def __str__(self): + return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) + + def id(self): # pylint: disable=invalid-name + """Returns the descriptive ID of the test. + + This is used internally by the unittesting framework to get a name + for the test to be used in reports. + + Returns: + The test id. + """ + return '%s.%s%s' % (_StrClass(self.__class__), + self._OriginalName(), + self._id_suffix.get(self._testMethodName, '')) + + +def CoopParameterizedTestCase(other_base_class): + """Returns a new base class with a cooperative metaclass base. 
+ + This enables the ParameterizedTestCase to be used in combination + with other base classes that have custom metaclasses, such as + mox.MoxTestBase. + + Only works with metaclasses that do not override type.__new__. + + Example: + + import google3 + import mox + + from google3.testing.pybase import parameterized + + class ExampleTest(parameterized.CoopParameterizedTestCase(mox.MoxTestBase)): + ... + + Args: + other_base_class: (class) A test case base class. + + Returns: + A new class object. + """ + metaclass = type( + 'CoopMetaclass', + (other_base_class.__metaclass__, + TestGeneratorMetaclass), {}) + return metaclass( + 'CoopParameterizedTestCase', + (other_base_class, ParameterizedTestCase), {}) diff --git a/deps/google/protobuf/internal/any_test_pb2.py b/deps/google/protobuf/internal/any_test_pb2.py new file mode 100644 index 00000000..ded08b61 --- /dev/null +++ b/deps/google/protobuf/internal/any_test_pb2.py @@ -0,0 +1,79 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/any_test.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/any_test.proto', + package='google.protobuf.internal', + syntax='proto3', + serialized_pb=_b('\n\'google/protobuf/internal/any_test.proto\x12\x18google.protobuf.internal\x1a\x19google/protobuf/any.proto\"A\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x11\n\tint_value\x18\x02 \x01(\x05\x62\x06proto3') + , + dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_TESTANY = _descriptor.Descriptor( + name='TestAny', + full_name='google.protobuf.internal.TestAny', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.internal.TestAny.value', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int_value', full_name='google.protobuf.internal.TestAny.int_value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=96, + serialized_end=161, +) + +_TESTANY.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY +DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY + +TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), dict( + DESCRIPTOR = _TESTANY, + __module__ = 'google.protobuf.internal.any_test_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestAny) + )) +_sym_db.RegisterMessage(TestAny) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/api_implementation.py b/deps/google/protobuf/internal/api_implementation.py new file mode 100644 index 00000000..ffcf7511 --- /dev/null +++ b/deps/google/protobuf/internal/api_implementation.py @@ -0,0 +1,107 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Determine which implementation of the protobuf API is used in this process. +""" + +import os +import sys + +try: + # pylint: disable=g-import-not-at-top + from google.protobuf.internal import _api_implementation + # The compile-time constants in the _api_implementation module can be used to + # switch to a certain implementation of the Python API at build time. + _api_version = _api_implementation.api_version + _proto_extension_modules_exist_in_build = True +except ImportError: + _api_version = -1 # Unspecified by compiler flags. + _proto_extension_modules_exist_in_build = False + +if _api_version == 1: + raise ValueError('api_version=1 is no longer supported.') +if _api_version < 0: # Still unspecified? + try: + # The presence of this module in a build allows the proto implementation to + # be upgraded merely via build deps rather than a compiler flag or the + # runtime environment variable. + # pylint: disable=g-import-not-at-top + from google.protobuf import _use_fast_cpp_protos + # Work around a known issue in the classic bootstrap .par import hook. + if not _use_fast_cpp_protos: + raise ImportError('_use_fast_cpp_protos import succeeded but was None') + del _use_fast_cpp_protos + _api_version = 2 + except ImportError: + if _proto_extension_modules_exist_in_build: + if sys.version_info[0] >= 3: # Python 3 defaults to C++ impl v2. + _api_version = 2 + # TODO(b/17427486): Make Python 2 default to C++ impl v2. 
+ +_default_implementation_type = ( + 'python' if _api_version <= 0 else 'cpp') + +# This environment variable can be used to switch to a certain implementation +# of the Python API, overriding the compile-time constants in the +# _api_implementation module. Right now only 'python' and 'cpp' are valid +# values. Any other value will be ignored. +_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', + _default_implementation_type) + +if _implementation_type != 'python': + _implementation_type = 'cpp' + +# This environment variable can be used to switch between the two +# 'cpp' implementations, overriding the compile-time constants in the +# _api_implementation module. Right now only '2' is supported. Any other +# value will cause an error to be raised. +_implementation_version_str = os.getenv( + 'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION', '2') + +if _implementation_version_str != '2': + raise ValueError( + 'unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: "' + + _implementation_version_str + '" (supported versions: 2)' + ) + +_implementation_version = int(_implementation_version_str) + + +# Usage of this function is discouraged. Clients shouldn't care which +# implementation of the API is in use. Note that there is no guarantee +# that differences between APIs will be maintained. +# Please don't use this function if possible. +def Type(): + return _implementation_type + + +# See comment on 'Type' above. +def Version(): + return _implementation_version diff --git a/deps/google/protobuf/internal/containers.py b/deps/google/protobuf/internal/containers.py new file mode 100644 index 00000000..97cdd848 --- /dev/null +++ b/deps/google/protobuf/internal/containers.py @@ -0,0 +1,611 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains container classes to represent different protocol buffer types. + +This file defines container classes which represent categories of protocol +buffer field types which need extra maintenance. Currently these categories +are: + - Repeated scalar fields - These are all repeated fields which aren't + composite (e.g. they are of simple types like int32, string, etc). + - Repeated composite fields - Repeated fields which are composite. 
This + includes groups and nested messages. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + +import collections +import sys + +if sys.version_info[0] < 3: + # We would use collections.MutableMapping all the time, but in Python 2 it + # doesn't define __slots__. This causes two significant problems: + # + # 1. we can't disallow arbitrary attribute assignment, even if our derived + # classes *do* define __slots__. + # + # 2. we can't safely derive a C type from it without __slots__ defined (the + # interpreter expects to find a dict at tp_dictoffset, which we can't + # robustly provide. And we don't want an instance dict anyway. + # + # So this is the Python 2.7 definition of Mapping/MutableMapping functions + # verbatim, except that: + # 1. We declare __slots__. + # 2. We don't declare this as a virtual base class. The classes defined + # in collections are the interesting base classes, not us. + # + # Note: deriving from object is critical. It is the only thing that makes + # this a true type, allowing us to derive from it in C++ cleanly and making + # __slots__ properly disallow arbitrary element assignment. 
+ + class Mapping(object): + __slots__ = () + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def __contains__(self, key): + try: + self[key] + except KeyError: + return False + else: + return True + + def iterkeys(self): + return iter(self) + + def itervalues(self): + for key in self: + yield self[key] + + def iteritems(self): + for key in self: + yield (key, self[key]) + + def keys(self): + return list(self) + + def items(self): + return [(key, self[key]) for key in self] + + def values(self): + return [self[key] for key in self] + + # Mappings are not hashable by default, but subclasses can change this + __hash__ = None + + def __eq__(self, other): + if not isinstance(other, collections.Mapping): + return NotImplemented + return dict(self.items()) == dict(other.items()) + + def __ne__(self, other): + return not (self == other) + + class MutableMapping(Mapping): + __slots__ = () + + __marker = object() + + def pop(self, key, default=__marker): + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def popitem(self): + try: + key = next(iter(self)) + except StopIteration: + raise KeyError + value = self[key] + del self[key] + return key, value + + def clear(self): + try: + while True: + self.popitem() + except KeyError: + pass + + def update(*args, **kwds): + if len(args) > 2: + raise TypeError("update() takes at most 2 positional " + "arguments ({} given)".format(len(args))) + elif not args: + raise TypeError("update() takes at least 1 argument (0 given)") + self = args[0] + other = args[1] if len(args) >= 2 else () + + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + def setdefault(self, key, 
default=None): + try: + return self[key] + except KeyError: + self[key] = default + return default + + collections.Mapping.register(Mapping) + collections.MutableMapping.register(MutableMapping) + +else: + # In Python 3 we can just use MutableMapping directly, because it defines + # __slots__. + MutableMapping = collections.MutableMapping + + +class BaseContainer(object): + + """Base container class.""" + + # Minimizes memory usage and disallows assignment to other attributes. + __slots__ = ['_message_listener', '_values'] + + def __init__(self, message_listener): + """ + Args: + message_listener: A MessageListener implementation. + The RepeatedScalarFieldContainer will call this object's + Modified() method when it is modified. + """ + self._message_listener = message_listener + self._values = [] + + def __getitem__(self, key): + """Retrieves item by the specified key.""" + return self._values[key] + + def __len__(self): + """Returns the number of elements in the container.""" + return len(self._values) + + def __ne__(self, other): + """Checks if another instance isn't equal to this one.""" + # The concrete classes should define __eq__. + return not self == other + + def __hash__(self): + raise TypeError('unhashable object') + + def __repr__(self): + return repr(self._values) + + def sort(self, *args, **kwargs): + # Continue to support the old sort_function keyword argument. + # This is expected to be a rare occurrence, so use LBYL to avoid + # the overhead of actually catching KeyError. + if 'sort_function' in kwargs: + kwargs['cmp'] = kwargs.pop('sort_function') + self._values.sort(*args, **kwargs) + + +class RepeatedScalarFieldContainer(BaseContainer): + + """Simple, type-checked, list-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. + __slots__ = ['_type_checker'] + + def __init__(self, message_listener, type_checker): + """ + Args: + message_listener: A MessageListener implementation. 
+ The RepeatedScalarFieldContainer will call this object's + Modified() method when it is modified. + type_checker: A type_checkers.ValueChecker instance to run on elements + inserted into this container. + """ + super(RepeatedScalarFieldContainer, self).__init__(message_listener) + self._type_checker = type_checker + + def append(self, value): + """Appends an item to the list. Similar to list.append().""" + self._values.append(self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def insert(self, key, value): + """Inserts the item at the specified position. Similar to list.insert().""" + self._values.insert(key, self._type_checker.CheckValue(value)) + if not self._message_listener.dirty: + self._message_listener.Modified() + + def extend(self, elem_seq): + """Extends by appending the given iterable. Similar to list.extend().""" + + if elem_seq is None: + return + try: + elem_seq_iter = iter(elem_seq) + except TypeError: + if not elem_seq: + # silently ignore falsy inputs :-/. + # TODO(ptucker): Deprecate this behavior. b/18413862 + return + raise + + new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] + if new_values: + self._values.extend(new_values) + self._message_listener.Modified() + + def MergeFrom(self, other): + """Appends the contents of another repeated field of the same type to this + one. We do not check the types of the individual fields. + """ + self._values.extend(other._values) + self._message_listener.Modified() + + def remove(self, elem): + """Removes an item from the list. Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key=-1): + """Removes and returns an item at a given index. 
Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + def __setitem__(self, key, value): + """Sets the item on the specified position.""" + if isinstance(key, slice): # PY3 + if key.step is not None: + raise ValueError('Extended slices not supported') + self.__setslice__(key.start, key.stop, value) + else: + self._values[key] = self._type_checker.CheckValue(value) + self._message_listener.Modified() + + def __getslice__(self, start, stop): + """Retrieves the subset of items from between the specified indices.""" + return self._values[start:stop] + + def __setslice__(self, start, stop, values): + """Sets the subset of items from between the specified indices.""" + new_values = [] + for value in values: + new_values.append(self._type_checker.CheckValue(value)) + self._values[start:stop] = new_values + self._message_listener.Modified() + + def __delitem__(self, key): + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __delslice__(self, start, stop): + """Deletes the subset of items from between the specified indices.""" + del self._values[start:stop] + self._message_listener.Modified() + + def __eq__(self, other): + """Compares the current instance with another one.""" + if self is other: + return True + # Special case for the same type which should be common and fast. + if isinstance(other, self.__class__): + return other._values == self._values + # We are presumably comparing against some other sequence type. + return other == self._values + +collections.MutableSequence.register(BaseContainer) + + +class RepeatedCompositeFieldContainer(BaseContainer): + + """Simple, list-like container for holding repeated composite fields.""" + + # Disallows assignment to other attributes. 
+ __slots__ = ['_message_descriptor'] + + def __init__(self, message_listener, message_descriptor): + """ + Note that we pass in a descriptor instead of the generated directly, + since at the time we construct a _RepeatedCompositeFieldContainer we + haven't yet necessarily initialized the type that will be contained in the + container. + + Args: + message_listener: A MessageListener implementation. + The RepeatedCompositeFieldContainer will call this object's + Modified() method when it is modified. + message_descriptor: A Descriptor instance describing the protocol type + that should be present in this container. We'll use the + _concrete_class field of this descriptor when the client calls add(). + """ + super(RepeatedCompositeFieldContainer, self).__init__(message_listener) + self._message_descriptor = message_descriptor + + def add(self, **kwargs): + """Adds a new element at the end of the list and returns it. Keyword + arguments may be used to initialize the element. + """ + new_element = self._message_descriptor._concrete_class(**kwargs) + new_element._SetListener(self._message_listener) + self._values.append(new_element) + if not self._message_listener.dirty: + self._message_listener.Modified() + return new_element + + def extend(self, elem_seq): + """Extends by appending the given sequence of elements of the same type + as this one, copying each individual message. + """ + message_class = self._message_descriptor._concrete_class + listener = self._message_listener + values = self._values + for message in elem_seq: + new_element = message_class() + new_element._SetListener(listener) + new_element.MergeFrom(message) + values.append(new_element) + listener.Modified() + + def MergeFrom(self, other): + """Appends the contents of another repeated field of the same type to this + one, copying each individual message. + """ + self.extend(other._values) + + def remove(self, elem): + """Removes an item from the list. 
Similar to list.remove().""" + self._values.remove(elem) + self._message_listener.Modified() + + def pop(self, key=-1): + """Removes and returns an item at a given index. Similar to list.pop().""" + value = self._values[key] + self.__delitem__(key) + return value + + def __getslice__(self, start, stop): + """Retrieves the subset of items from between the specified indices.""" + return self._values[start:stop] + + def __delitem__(self, key): + """Deletes the item at the specified position.""" + del self._values[key] + self._message_listener.Modified() + + def __delslice__(self, start, stop): + """Deletes the subset of items from between the specified indices.""" + del self._values[start:stop] + self._message_listener.Modified() + + def __eq__(self, other): + """Compares the current instance with another one.""" + if self is other: + return True + if not isinstance(other, self.__class__): + raise TypeError('Can only compare repeated composite fields against ' + 'other repeated composite fields.') + return self._values == other._values + + +class ScalarMap(MutableMapping): + + """Simple, type-checked, dict-like container for holding repeated scalars.""" + + # Disallows assignment to other attributes. + __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener'] + + def __init__(self, message_listener, key_checker, value_checker): + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. + key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. 
+ """ + self._message_listener = message_listener + self._key_checker = key_checker + self._value_checker = value_checker + self._values = {} + + def __getitem__(self, key): + try: + return self._values[key] + except KeyError: + key = self._key_checker.CheckValue(key) + val = self._value_checker.DefaultValue() + self._values[key] = val + return val + + def __contains__(self, item): + # We check the key's type to match the strong-typing flavor of the API. + # Also this makes it easier to match the behavior of the C++ implementation. + self._key_checker.CheckValue(item) + return item in self._values + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. + def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __setitem__(self, key, value): + checked_key = self._key_checker.CheckValue(key) + checked_value = self._value_checker.CheckValue(value) + self._values[checked_key] = checked_value + self._message_listener.Modified() + + def __delitem__(self, key): + del self._values[key] + self._message_listener.Modified() + + def __len__(self): + return len(self._values) + + def __iter__(self): + return iter(self._values) + + def __repr__(self): + return repr(self._values) + + def MergeFrom(self, other): + self._values.update(other._values) + self._message_listener.Modified() + + def InvalidateIterators(self): + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. 
+ def clear(self): + self._values.clear() + self._message_listener.Modified() + + +class MessageMap(MutableMapping): + + """Simple, type-checked, dict-like container for with submessage values.""" + + # Disallows assignment to other attributes. + __slots__ = ['_key_checker', '_values', '_message_listener', + '_message_descriptor'] + + def __init__(self, message_listener, message_descriptor, key_checker): + """ + Args: + message_listener: A MessageListener implementation. + The ScalarMap will call this object's Modified() method when it + is modified. + key_checker: A type_checkers.ValueChecker instance to run on keys + inserted into this container. + value_checker: A type_checkers.ValueChecker instance to run on values + inserted into this container. + """ + self._message_listener = message_listener + self._message_descriptor = message_descriptor + self._key_checker = key_checker + self._values = {} + + def __getitem__(self, key): + try: + return self._values[key] + except KeyError: + key = self._key_checker.CheckValue(key) + new_element = self._message_descriptor._concrete_class() + new_element._SetListener(self._message_listener) + self._values[key] = new_element + self._message_listener.Modified() + + return new_element + + def get_or_create(self, key): + """get_or_create() is an alias for getitem (ie. map[key]). + + Args: + key: The key to get or create in the map. + + This is useful in cases where you want to be explicit that the call is + mutating the map. This can avoid lint errors for statements like this + that otherwise would appear to be pointless statements: + + msg.my_map[key] + """ + return self[key] + + # We need to override this explicitly, because our defaultdict-like behavior + # will make the default implementation (from our base class) always insert + # the key. 
+ def get(self, key, default=None): + if key in self: + return self[key] + else: + return default + + def __contains__(self, item): + return item in self._values + + def __setitem__(self, key, value): + raise ValueError('May not set values directly, call my_map[key].foo = 5') + + def __delitem__(self, key): + del self._values[key] + self._message_listener.Modified() + + def __len__(self): + return len(self._values) + + def __iter__(self): + return iter(self._values) + + def __repr__(self): + return repr(self._values) + + def MergeFrom(self, other): + for key in other: + self[key].MergeFrom(other[key]) + # self._message_listener.Modified() not required here, because + # mutations to submessages already propagate. + + def InvalidateIterators(self): + # It appears that the only way to reliably invalidate iterators to + # self._values is to ensure that its size changes. + original = self._values + self._values = original.copy() + original[None] = None + + # This is defined in the abstract base, but we can do it much more cheaply. + def clear(self): + self._values.clear() + self._message_listener.Modified() diff --git a/deps/google/protobuf/internal/decoder.py b/deps/google/protobuf/internal/decoder.py new file mode 100644 index 00000000..31869e45 --- /dev/null +++ b/deps/google/protobuf/internal/decoder.py @@ -0,0 +1,854 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Code for decoding protocol buffer primitives. + +This code is very similar to encoder.py -- read the docs for that module first. + +A "decoder" is a function with the signature: + Decode(buffer, pos, end, message, field_dict) +The arguments are: + buffer: The string containing the encoded message. + pos: The current position in the string. + end: The position in the string where the current message ends. May be + less than len(buffer) if we're reading a sub-message. + message: The message object into which we're parsing. + field_dict: message._fields (avoids a hashtable lookup). +The decoder reads the field and stores it into field_dict, returning the new +buffer position. 
A decoder for a repeated field may proactively decode all of +the elements of that field, if they appear consecutively. + +Note that decoders may throw any of the following: + IndexError: Indicates a truncated message. + struct.error: Unpacking of a fixed-width field failed. + message.DecodeError: Other errors. + +Decoders are expected to raise an exception if they are called with pos > end. +This allows callers to be lax about bounds checking: it's fineto read past +"end" as long as you are sure that someone else will notice and throw an +exception later on. + +Something up the call stack is expected to catch IndexError and struct.error +and convert them to message.DecodeError. + +Decoders are constructed using decoder constructors with the signature: + MakeDecoder(field_number, is_repeated, is_packed, key, new_default) +The arguments are: + field_number: The field number of the field we want to decode. + is_repeated: Is the field a repeated field? (bool) + is_packed: Is the field a packed field? (bool) + key: The key to use when looking up the field within field_dict. + (This is actually the FieldDescriptor but nothing in this + file should depend on that.) + new_default: A function which takes a message object as a parameter and + returns a new instance of the default value for this field. + (This is called for repeated fields and sub-messages, when an + instance does not already exist.) + +As with encoders, we define a decoder constructor for every type of field. +Then, for every field of every message class we construct an actual decoder. +That decoder goes into a dict indexed by tag, so when we decode a message +we repeatedly read a tag, look up the corresponding decoder, and invoke it. 
+""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import struct + +import six + +if six.PY3: + long = int + +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import message + + +# This will overflow and thus become IEEE-754 "infinity". We would use +# "float('inf')" but it doesn't work on Windows pre-Python-2.6. +_POS_INF = 1e10000 +_NEG_INF = -_POS_INF +_NAN = _POS_INF * 0 + + +# This is not for optimization, but rather to avoid conflicts with local +# variables named "message". +_DecodeError = message.DecodeError + + +def _VarintDecoder(mask, result_type): + """Return an encoder for a basic varint value (does not include tag). + + Decoded values will be bitwise-anded with the given mask before being + returned, e.g. to limit them to 32 bits. The returned decoder does not + take the usual "end" parameter -- the caller is expected to do bounds checking + after the fact (often the caller can defer such checking until later). The + decoder returns a (value, new_pos) pair. 
+ """ + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = six.indexbytes(buffer, pos) + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + + +def _SignedVarintDecoder(mask, result_type): + """Like _VarintDecoder() but decodes signed values.""" + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = six.indexbytes(buffer, pos) + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + if result > 0x7fffffffffffffff: + result -= (1 << 64) + result |= ~mask + else: + result &= mask + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + +# We force 32-bit values to int and 64-bit values to long to make +# alternate implementations where the distinction is more significant +# (e.g. the C++ implementation) simpler. + +_DecodeVarint = _VarintDecoder((1 << 64) - 1, long) +_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, long) + +# Use these versions for values which must be limited to 32 bits. +_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +_DecodeSignedVarint32 = _SignedVarintDecoder((1 << 32) - 1, int) + + +def ReadTag(buffer, pos): + """Read a tag from the buffer, and return a (tag_bytes, new_pos) tuple. + + We return the raw bytes of the tag rather than decoding them. The raw + bytes can then be used to look up the proper decoder. This effectively allows + us to trade some work that would be done in pure-python (decoding a varint) + for work that is done in C (searching for a byte string in a hash table). + In a low-level language it would be much cheaper to decode the varint and + use that, but not in Python. 
+ """ + + start = pos + while six.indexbytes(buffer, pos) & 0x80: + pos += 1 + pos += 1 + return (buffer[start:pos], pos) + + +# -------------------------------------------------------------------- + + +def _SimpleDecoder(wire_type, decode_value): + """Return a constructor for a decoder for fields of a particular type. + + Args: + wire_type: The field's wire type. + decode_value: A function which decodes an individual value, e.g. + _DecodeVarint() + """ + + def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default): + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + (element, pos) = decode_value(buffer, pos) + value.append(element) + if pos > endpoint: + del value[-1] # Discard corrupt value. + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_type) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = decode_value(buffer, pos) + value.append(element) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (field_dict[key], pos) = decode_value(buffer, pos) + if pos > end: + del field_dict[key] # Discard corrupt value. 
+ raise _DecodeError('Truncated message.') + return pos + return DecodeField + + return SpecificDecoder + + +def _ModifiedDecoder(wire_type, decode_value, modify_value): + """Like SimpleDecoder but additionally invokes modify_value on every value + before storing it. Usually modify_value is ZigZagDecode. + """ + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + def InnerDecode(buffer, pos): + (result, new_pos) = decode_value(buffer, pos) + return (modify_value(result), new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _StructPackDecoder(wire_type, format): + """Return a constructor for a decoder for a fixed-width field. + + Args: + wire_type: The field's wire type. + format: The format string to pass to struct.unpack(). + """ + + value_size = struct.calcsize(format) + local_unpack = struct.unpack + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + + def InnerDecode(buffer, pos): + new_pos = pos + value_size + result = local_unpack(format, buffer[pos:new_pos])[0] + return (result, new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _FloatDecoder(): + """Returns a decoder for a float field. + + This code works around a bug in struct.unpack for non-finite 32-bit + floating-point values. + """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign + # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand. + new_pos = pos + 4 + float_bytes = buffer[pos:new_pos] + + # If this value has all its exponent bits set, then it's non-finite. 
+ # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. + # To avoid that, we parse it specially. + if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'): + # If at least one significand bit is set... + if float_bytes[0:3] != b'\x00\x00\x80': + return (_NAN, new_pos) + # If sign bit is set... + if float_bytes[3:4] == b'\xFF': + return (_NEG_INF, new_pos) + return (_POS_INF, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack('= b'\xF0') + and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): + return (_NAN, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack(' end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + value_start_pos = pos + (element, pos) = _DecodeSignedVarint32(buffer, pos) + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos])) + if pos > endpoint: + if element in enum_type.values_by_number: + del value[-1] # Discard corrupt value. 
+ else: + del message._unknown_fields[-1] + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = _DecodeSignedVarint32(buffer, pos) + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (tag_bytes, buffer[pos:new_pos])) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value_start_pos = pos + (enum_value, pos) = _DecodeSignedVarint32(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if enum_value in enum_type.values_by_number: + field_dict[key] = enum_value + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos])) + return pos + return DecodeField + + +# -------------------------------------------------------------------- + + +Int32Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32) + +Int64Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint) + +UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32) +UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint) + +SInt32Decoder = 
_ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode) +SInt64Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, ' end: + raise _DecodeError('Truncated string.') + value.append(_ConvertToUnicode(buffer[pos:new_pos])) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) + return new_pos + return DecodeField + + +def BytesDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a bytes field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + value.append(buffer[pos:new_pos]) + # Predict that the next tag is another copy of the same repeated field. 
+ pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + field_dict[key] = buffer[pos:new_pos] + return new_pos + return DecodeField + + +def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a group field.""" + + end_tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_END_GROUP) + end_tag_len = len(end_tag_bytes) + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_START_GROUP) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value.add()._InternalParse(buffer, pos, end) + # Read end tag. + new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value._InternalParse(buffer, pos, end) + # Read end tag. 
+ new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + return new_pos + return DecodeField + + +def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a message field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. 
+ raise _DecodeError('Unexpected end-group tag.') + return new_pos + return DecodeField + + +# -------------------------------------------------------------------- + +MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) + +def MessageSetItemDecoder(extensions_by_number): + """Returns a decoder for a MessageSet item. + + The parameter is the _extensions_by_number map for the message class. + + The message set message looks like this: + message MessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required string message = 3; + } + } + """ + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + local_ReadTag = ReadTag + local_DecodeVarint = _DecodeVarint + local_SkipField = SkipField + + def DecodeItem(buffer, pos, end, message, field_dict): + message_set_item_start = pos + type_id = -1 + message_start = -1 + message_end = -1 + + # Technically, type_id and message can appear in any order, so we need + # a little loop here. 
+ while 1: + (tag_bytes, pos) = local_ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = local_DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = local_DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + extension = extensions_by_number.get(type_id) + if extension is not None: + value = field_dict.get(extension) + if value is None: + value = field_dict.setdefault( + extension, extension.message_type._concrete_class()) + if value._InternalParse(buffer, message_start,message_end) != message_end: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append((MESSAGE_SET_ITEM_TAG, + buffer[message_set_item_start:pos])) + + return pos + + return DecodeItem + +# -------------------------------------------------------------------- + +def MapDecoder(field_descriptor, new_default, is_message_map): + """Returns a decoder for a map field.""" + + key = field_descriptor + tag_bytes = encoder.TagBytes(field_descriptor.number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + local_DecodeVarint = _DecodeVarint + # Can't read _concrete_class yet; might not be initialized. 
+ message_type = field_descriptor.message_type + + def DecodeMap(buffer, pos, end, message, field_dict): + submsg = message_type._concrete_class() + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + submsg.Clear() + if submsg._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + + if is_message_map: + value[submsg.key].MergeFrom(submsg.value) + else: + value[submsg.key] = submsg.value + + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + + return DecodeMap + +# -------------------------------------------------------------------- +# Optimization is not as heavy here because calls to SkipField() are rare, +# except for handling end-group tags. + +def _SkipVarint(buffer, pos, end): + """Skip a varint value. Returns the new position.""" + # Previously ord(buffer[pos]) raised IndexError when pos is out of range. + # With this code, ord(b'') raises TypeError. Both are handled in + # python_message.py to generate a 'Truncated message' error. + while ord(buffer[pos:pos+1]) & 0x80: + pos += 1 + pos += 1 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + +def _SkipFixed64(buffer, pos, end): + """Skip a fixed64 value. Returns the new position.""" + + pos += 8 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + +def _SkipLengthDelimited(buffer, pos, end): + """Skip a length-delimited value. 
Returns the new position.""" + + (size, pos) = _DecodeVarint(buffer, pos) + pos += size + if pos > end: + raise _DecodeError('Truncated message.') + return pos + +def _SkipGroup(buffer, pos, end): + """Skip sub-group. Returns the new position.""" + + while 1: + (tag_bytes, pos) = ReadTag(buffer, pos) + new_pos = SkipField(buffer, pos, end, tag_bytes) + if new_pos == -1: + return pos + pos = new_pos + +def _EndGroup(buffer, pos, end): + """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.""" + + return -1 + +def _SkipFixed32(buffer, pos, end): + """Skip a fixed32 value. Returns the new position.""" + + pos += 4 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + +def _RaiseInvalidWireType(buffer, pos, end): + """Skip function for unknown wire types. Raises an exception.""" + + raise _DecodeError('Tag had invalid wire type.') + +def _FieldSkipper(): + """Constructs the SkipField function.""" + + WIRETYPE_TO_SKIPPER = [ + _SkipVarint, + _SkipFixed64, + _SkipLengthDelimited, + _SkipGroup, + _EndGroup, + _SkipFixed32, + _RaiseInvalidWireType, + _RaiseInvalidWireType, + ] + + wiretype_mask = wire_format.TAG_TYPE_MASK + + def SkipField(buffer, pos, end, tag_bytes): + """Skips a field with the specified tag. + + |pos| should point to the byte immediately after the tag. + + Returns: + The new position (after the tag value), or -1 if the tag is an end-group + tag (in which case the calling loop should break). + """ + + # The wire type is always in the first byte since varints are little-endian. + wire_type = ord(tag_bytes[0:1]) & wiretype_mask + return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end) + + return SkipField + +SkipField = _FieldSkipper() diff --git a/deps/google/protobuf/internal/descriptor_database_test.py b/deps/google/protobuf/internal/descriptor_database_test.py new file mode 100644 index 00000000..1baff7d1 --- /dev/null +++ b/deps/google/protobuf/internal/descriptor_database_test.py @@ -0,0 +1,68 @@ +#! 
/usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Tests for google.protobuf.descriptor_database.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf import descriptor_pb2 +from google.protobuf.internal import factory_test2_pb2 +from google.protobuf import descriptor_database + + +class DescriptorDatabaseTest(unittest.TestCase): + + def testAdd(self): + db = descriptor_database.DescriptorDatabase() + file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( + factory_test2_pb2.DESCRIPTOR.serialized_pb) + db.Add(file_desc_proto) + + self.assertEqual(file_desc_proto, db.FindFileByName( + 'google/protobuf/internal/factory_test2.proto')) + self.assertEqual(file_desc_proto, db.FindFileContainingSymbol( + 'google.protobuf.python.internal.Factory2Message')) + self.assertEqual(file_desc_proto, db.FindFileContainingSymbol( + 'google.protobuf.python.internal.Factory2Message.NestedFactory2Message')) + self.assertEqual(file_desc_proto, db.FindFileContainingSymbol( + 'google.protobuf.python.internal.Factory2Enum')) + self.assertEqual(file_desc_proto, db.FindFileContainingSymbol( + 'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum')) + self.assertEqual(file_desc_proto, db.FindFileContainingSymbol( + 'google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum')) + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/descriptor_pool_test.py b/deps/google/protobuf/internal/descriptor_pool_test.py new file mode 100644 index 00000000..f1d6bf99 --- /dev/null +++ b/deps/google/protobuf/internal/descriptor_pool_test.py @@ -0,0 +1,763 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Tests for google.protobuf.descriptor_pool.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +import os + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf import unittest_import_pb2 +from google.protobuf import unittest_import_public_pb2 +from google.protobuf import unittest_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf.internal import api_implementation +from google.protobuf.internal import descriptor_pool_test1_pb2 +from google.protobuf.internal import descriptor_pool_test2_pb2 +from google.protobuf.internal import factory_test1_pb2 +from google.protobuf.internal import factory_test2_pb2 +from google.protobuf.internal import test_util +from google.protobuf import descriptor +from google.protobuf import descriptor_database +from google.protobuf import descriptor_pool +from google.protobuf import message_factory +from google.protobuf import symbol_database + + +class DescriptorPoolTest(unittest.TestCase): + + def CreatePool(self): + return descriptor_pool.DescriptorPool() + + def setUp(self): + self.pool = self.CreatePool() + self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString( + factory_test1_pb2.DESCRIPTOR.serialized_pb) + self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString( + factory_test2_pb2.DESCRIPTOR.serialized_pb) + self.pool.Add(self.factory_test1_fd) + self.pool.Add(self.factory_test2_fd) + + def testFindFileByName(self): + name1 = 'google/protobuf/internal/factory_test1.proto' + file_desc1 = self.pool.FindFileByName(name1) + self.assertIsInstance(file_desc1, descriptor.FileDescriptor) + self.assertEqual(name1, file_desc1.name) + self.assertEqual('google.protobuf.python.internal', file_desc1.package) + self.assertIn('Factory1Message', file_desc1.message_types_by_name) + + name2 = 'google/protobuf/internal/factory_test2.proto' + file_desc2 = self.pool.FindFileByName(name2) + self.assertIsInstance(file_desc2, descriptor.FileDescriptor) 
+ self.assertEqual(name2, file_desc2.name) + self.assertEqual('google.protobuf.python.internal', file_desc2.package) + self.assertIn('Factory2Message', file_desc2.message_types_by_name) + + def testFindFileByNameFailure(self): + with self.assertRaises(KeyError): + self.pool.FindFileByName('Does not exist') + + def testFindFileContainingSymbol(self): + file_desc1 = self.pool.FindFileContainingSymbol( + 'google.protobuf.python.internal.Factory1Message') + self.assertIsInstance(file_desc1, descriptor.FileDescriptor) + self.assertEqual('google/protobuf/internal/factory_test1.proto', + file_desc1.name) + self.assertEqual('google.protobuf.python.internal', file_desc1.package) + self.assertIn('Factory1Message', file_desc1.message_types_by_name) + + file_desc2 = self.pool.FindFileContainingSymbol( + 'google.protobuf.python.internal.Factory2Message') + self.assertIsInstance(file_desc2, descriptor.FileDescriptor) + self.assertEqual('google/protobuf/internal/factory_test2.proto', + file_desc2.name) + self.assertEqual('google.protobuf.python.internal', file_desc2.package) + self.assertIn('Factory2Message', file_desc2.message_types_by_name) + + def testFindFileContainingSymbolFailure(self): + with self.assertRaises(KeyError): + self.pool.FindFileContainingSymbol('Does not exist') + + def testFindMessageTypeByName(self): + msg1 = self.pool.FindMessageTypeByName( + 'google.protobuf.python.internal.Factory1Message') + self.assertIsInstance(msg1, descriptor.Descriptor) + self.assertEqual('Factory1Message', msg1.name) + self.assertEqual('google.protobuf.python.internal.Factory1Message', + msg1.full_name) + self.assertEqual(None, msg1.containing_type) + + nested_msg1 = msg1.nested_types[0] + self.assertEqual('NestedFactory1Message', nested_msg1.name) + self.assertEqual(msg1, nested_msg1.containing_type) + + nested_enum1 = msg1.enum_types[0] + self.assertEqual('NestedFactory1Enum', nested_enum1.name) + self.assertEqual(msg1, nested_enum1.containing_type) + + 
self.assertEqual(nested_msg1, msg1.fields_by_name[ + 'nested_factory_1_message'].message_type) + self.assertEqual(nested_enum1, msg1.fields_by_name[ + 'nested_factory_1_enum'].enum_type) + + msg2 = self.pool.FindMessageTypeByName( + 'google.protobuf.python.internal.Factory2Message') + self.assertIsInstance(msg2, descriptor.Descriptor) + self.assertEqual('Factory2Message', msg2.name) + self.assertEqual('google.protobuf.python.internal.Factory2Message', + msg2.full_name) + self.assertIsNone(msg2.containing_type) + + nested_msg2 = msg2.nested_types[0] + self.assertEqual('NestedFactory2Message', nested_msg2.name) + self.assertEqual(msg2, nested_msg2.containing_type) + + nested_enum2 = msg2.enum_types[0] + self.assertEqual('NestedFactory2Enum', nested_enum2.name) + self.assertEqual(msg2, nested_enum2.containing_type) + + self.assertEqual(nested_msg2, msg2.fields_by_name[ + 'nested_factory_2_message'].message_type) + self.assertEqual(nested_enum2, msg2.fields_by_name[ + 'nested_factory_2_enum'].enum_type) + + self.assertTrue(msg2.fields_by_name['int_with_default'].has_default_value) + self.assertEqual( + 1776, msg2.fields_by_name['int_with_default'].default_value) + + self.assertTrue( + msg2.fields_by_name['double_with_default'].has_default_value) + self.assertEqual( + 9.99, msg2.fields_by_name['double_with_default'].default_value) + + self.assertTrue( + msg2.fields_by_name['string_with_default'].has_default_value) + self.assertEqual( + 'hello world', msg2.fields_by_name['string_with_default'].default_value) + + self.assertTrue(msg2.fields_by_name['bool_with_default'].has_default_value) + self.assertFalse(msg2.fields_by_name['bool_with_default'].default_value) + + self.assertTrue(msg2.fields_by_name['enum_with_default'].has_default_value) + self.assertEqual( + 1, msg2.fields_by_name['enum_with_default'].default_value) + + msg3 = self.pool.FindMessageTypeByName( + 'google.protobuf.python.internal.Factory2Message.NestedFactory2Message') + self.assertEqual(nested_msg2, 
msg3) + + self.assertTrue(msg2.fields_by_name['bytes_with_default'].has_default_value) + self.assertEqual( + b'a\xfb\x00c', + msg2.fields_by_name['bytes_with_default'].default_value) + + self.assertEqual(1, len(msg2.oneofs)) + self.assertEqual(1, len(msg2.oneofs_by_name)) + self.assertEqual(2, len(msg2.oneofs[0].fields)) + for name in ['oneof_int', 'oneof_string']: + self.assertEqual(msg2.oneofs[0], + msg2.fields_by_name[name].containing_oneof) + self.assertIn(msg2.fields_by_name[name], msg2.oneofs[0].fields) + + def testFindMessageTypeByNameFailure(self): + with self.assertRaises(KeyError): + self.pool.FindMessageTypeByName('Does not exist') + + def testFindEnumTypeByName(self): + enum1 = self.pool.FindEnumTypeByName( + 'google.protobuf.python.internal.Factory1Enum') + self.assertIsInstance(enum1, descriptor.EnumDescriptor) + self.assertEqual(0, enum1.values_by_name['FACTORY_1_VALUE_0'].number) + self.assertEqual(1, enum1.values_by_name['FACTORY_1_VALUE_1'].number) + + nested_enum1 = self.pool.FindEnumTypeByName( + 'google.protobuf.python.internal.Factory1Message.NestedFactory1Enum') + self.assertIsInstance(nested_enum1, descriptor.EnumDescriptor) + self.assertEqual( + 0, nested_enum1.values_by_name['NESTED_FACTORY_1_VALUE_0'].number) + self.assertEqual( + 1, nested_enum1.values_by_name['NESTED_FACTORY_1_VALUE_1'].number) + + enum2 = self.pool.FindEnumTypeByName( + 'google.protobuf.python.internal.Factory2Enum') + self.assertIsInstance(enum2, descriptor.EnumDescriptor) + self.assertEqual(0, enum2.values_by_name['FACTORY_2_VALUE_0'].number) + self.assertEqual(1, enum2.values_by_name['FACTORY_2_VALUE_1'].number) + + nested_enum2 = self.pool.FindEnumTypeByName( + 'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum') + self.assertIsInstance(nested_enum2, descriptor.EnumDescriptor) + self.assertEqual( + 0, nested_enum2.values_by_name['NESTED_FACTORY_2_VALUE_0'].number) + self.assertEqual( + 1, 
nested_enum2.values_by_name['NESTED_FACTORY_2_VALUE_1'].number) + + def testFindEnumTypeByNameFailure(self): + with self.assertRaises(KeyError): + self.pool.FindEnumTypeByName('Does not exist') + + def testFindFieldByName(self): + field = self.pool.FindFieldByName( + 'google.protobuf.python.internal.Factory1Message.list_value') + self.assertEqual(field.name, 'list_value') + self.assertEqual(field.label, field.LABEL_REPEATED) + with self.assertRaises(KeyError): + self.pool.FindFieldByName('Does not exist') + + def testFindExtensionByName(self): + # An extension defined in a message. + extension = self.pool.FindExtensionByName( + 'google.protobuf.python.internal.Factory2Message.one_more_field') + self.assertEqual(extension.name, 'one_more_field') + # An extension defined at file scope. + extension = self.pool.FindExtensionByName( + 'google.protobuf.python.internal.another_field') + self.assertEqual(extension.name, 'another_field') + self.assertEqual(extension.number, 1002) + with self.assertRaises(KeyError): + self.pool.FindFieldByName('Does not exist') + + def testExtensionsAreNotFields(self): + with self.assertRaises(KeyError): + self.pool.FindFieldByName('google.protobuf.python.internal.another_field') + with self.assertRaises(KeyError): + self.pool.FindFieldByName( + 'google.protobuf.python.internal.Factory2Message.one_more_field') + with self.assertRaises(KeyError): + self.pool.FindExtensionByName( + 'google.protobuf.python.internal.Factory1Message.list_value') + + def testUserDefinedDB(self): + db = descriptor_database.DescriptorDatabase() + self.pool = descriptor_pool.DescriptorPool(db) + db.Add(self.factory_test1_fd) + db.Add(self.factory_test2_fd) + self.testFindMessageTypeByName() + + def testAddSerializedFile(self): + self.pool = descriptor_pool.DescriptorPool() + self.pool.AddSerializedFile(self.factory_test1_fd.SerializeToString()) + self.pool.AddSerializedFile(self.factory_test2_fd.SerializeToString()) + self.testFindMessageTypeByName() + + def 
testComplexNesting(self): + test1_desc = descriptor_pb2.FileDescriptorProto.FromString( + descriptor_pool_test1_pb2.DESCRIPTOR.serialized_pb) + test2_desc = descriptor_pb2.FileDescriptorProto.FromString( + descriptor_pool_test2_pb2.DESCRIPTOR.serialized_pb) + self.pool.Add(test1_desc) + self.pool.Add(test2_desc) + TEST1_FILE.CheckFile(self, self.pool) + TEST2_FILE.CheckFile(self, self.pool) + + + def testEnumDefaultValue(self): + """Test the default value of enums which don't start at zero.""" + def _CheckDefaultValue(file_descriptor): + default_value = (file_descriptor + .message_types_by_name['DescriptorPoolTest1'] + .fields_by_name['nested_enum'] + .default_value) + self.assertEqual(default_value, + descriptor_pool_test1_pb2.DescriptorPoolTest1.BETA) + # First check what the generated descriptor contains. + _CheckDefaultValue(descriptor_pool_test1_pb2.DESCRIPTOR) + # Then check the generated pool. Normally this is the same descriptor. + file_descriptor = symbol_database.Default().pool.FindFileByName( + 'google/protobuf/internal/descriptor_pool_test1.proto') + self.assertIs(file_descriptor, descriptor_pool_test1_pb2.DESCRIPTOR) + _CheckDefaultValue(file_descriptor) + + # Then check the dynamic pool and its internal DescriptorDatabase. 
+ descriptor_proto = descriptor_pb2.FileDescriptorProto.FromString( + descriptor_pool_test1_pb2.DESCRIPTOR.serialized_pb) + self.pool.Add(descriptor_proto) + # And do the same check as above + file_descriptor = self.pool.FindFileByName( + 'google/protobuf/internal/descriptor_pool_test1.proto') + _CheckDefaultValue(file_descriptor) + + def testDefaultValueForCustomMessages(self): + """Check the value returned by non-existent fields.""" + def _CheckValueAndType(value, expected_value, expected_type): + self.assertEqual(value, expected_value) + self.assertIsInstance(value, expected_type) + + def _CheckDefaultValues(msg): + try: + int64 = long + except NameError: # Python3 + int64 = int + try: + unicode_type = unicode + except NameError: # Python3 + unicode_type = str + _CheckValueAndType(msg.optional_int32, 0, int) + _CheckValueAndType(msg.optional_uint64, 0, (int64, int)) + _CheckValueAndType(msg.optional_float, 0, (float, int)) + _CheckValueAndType(msg.optional_double, 0, (float, int)) + _CheckValueAndType(msg.optional_bool, False, bool) + _CheckValueAndType(msg.optional_string, u'', unicode_type) + _CheckValueAndType(msg.optional_bytes, b'', bytes) + _CheckValueAndType(msg.optional_nested_enum, msg.FOO, int) + # First for the generated message + _CheckDefaultValues(unittest_pb2.TestAllTypes()) + # Then for a message built with from the DescriptorPool. 
+ pool = descriptor_pool.DescriptorPool() + pool.Add(descriptor_pb2.FileDescriptorProto.FromString( + unittest_import_public_pb2.DESCRIPTOR.serialized_pb)) + pool.Add(descriptor_pb2.FileDescriptorProto.FromString( + unittest_import_pb2.DESCRIPTOR.serialized_pb)) + pool.Add(descriptor_pb2.FileDescriptorProto.FromString( + unittest_pb2.DESCRIPTOR.serialized_pb)) + message_class = message_factory.MessageFactory(pool).GetPrototype( + pool.FindMessageTypeByName( + unittest_pb2.TestAllTypes.DESCRIPTOR.full_name)) + _CheckDefaultValues(message_class()) + + +@unittest.skipIf(api_implementation.Type() != 'cpp', + 'explicit tests of the C++ implementation') +class CppDescriptorPoolTest(DescriptorPoolTest): + # TODO(amauryfa): remove when descriptor_pool.DescriptorPool() creates true + # C++ descriptor pool object for C++ implementation. + + def CreatePool(self): + # pylint: disable=g-import-not-at-top + from google.protobuf.pyext import _message + return _message.DescriptorPool() + + +class ProtoFile(object): + + def __init__(self, name, package, messages, dependencies=None): + self.name = name + self.package = package + self.messages = messages + self.dependencies = dependencies or [] + + def CheckFile(self, test, pool): + file_desc = pool.FindFileByName(self.name) + test.assertEqual(self.name, file_desc.name) + test.assertEqual(self.package, file_desc.package) + dependencies_names = [f.name for f in file_desc.dependencies] + test.assertEqual(self.dependencies, dependencies_names) + for name, msg_type in self.messages.items(): + msg_type.CheckType(test, None, name, file_desc) + + +class EnumType(object): + + def __init__(self, values): + self.values = values + + def CheckType(self, test, msg_desc, name, file_desc): + enum_desc = msg_desc.enum_types_by_name[name] + test.assertEqual(name, enum_desc.name) + expected_enum_full_name = '.'.join([msg_desc.full_name, name]) + test.assertEqual(expected_enum_full_name, enum_desc.full_name) + test.assertEqual(msg_desc, 
enum_desc.containing_type) + test.assertEqual(file_desc, enum_desc.file) + for index, (value, number) in enumerate(self.values): + value_desc = enum_desc.values_by_name[value] + test.assertEqual(value, value_desc.name) + test.assertEqual(index, value_desc.index) + test.assertEqual(number, value_desc.number) + test.assertEqual(enum_desc, value_desc.type) + test.assertIn(value, msg_desc.enum_values_by_name) + + +class MessageType(object): + + def __init__(self, type_dict, field_list, is_extendable=False, + extensions=None): + self.type_dict = type_dict + self.field_list = field_list + self.is_extendable = is_extendable + self.extensions = extensions or [] + + def CheckType(self, test, containing_type_desc, name, file_desc): + if containing_type_desc is None: + desc = file_desc.message_types_by_name[name] + expected_full_name = '.'.join([file_desc.package, name]) + else: + desc = containing_type_desc.nested_types_by_name[name] + expected_full_name = '.'.join([containing_type_desc.full_name, name]) + + test.assertEqual(name, desc.name) + test.assertEqual(expected_full_name, desc.full_name) + test.assertEqual(containing_type_desc, desc.containing_type) + test.assertEqual(desc.file, file_desc) + test.assertEqual(self.is_extendable, desc.is_extendable) + for name, subtype in self.type_dict.items(): + subtype.CheckType(test, desc, name, file_desc) + + for index, (name, field) in enumerate(self.field_list): + field.CheckField(test, desc, name, index) + + for index, (name, field) in enumerate(self.extensions): + field.CheckField(test, desc, name, index) + + +class EnumField(object): + + def __init__(self, number, type_name, default_value): + self.number = number + self.type_name = type_name + self.default_value = default_value + + def CheckField(self, test, msg_desc, name, index): + field_desc = msg_desc.fields_by_name[name] + enum_desc = msg_desc.enum_types_by_name[self.type_name] + test.assertEqual(name, field_desc.name) + expected_field_full_name = 
'.'.join([msg_desc.full_name, name]) + test.assertEqual(expected_field_full_name, field_desc.full_name) + test.assertEqual(index, field_desc.index) + test.assertEqual(self.number, field_desc.number) + test.assertEqual(descriptor.FieldDescriptor.TYPE_ENUM, field_desc.type) + test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_ENUM, + field_desc.cpp_type) + test.assertTrue(field_desc.has_default_value) + test.assertEqual(enum_desc.values_by_name[self.default_value].number, + field_desc.default_value) + test.assertEqual(msg_desc, field_desc.containing_type) + test.assertEqual(enum_desc, field_desc.enum_type) + + +class MessageField(object): + + def __init__(self, number, type_name): + self.number = number + self.type_name = type_name + + def CheckField(self, test, msg_desc, name, index): + field_desc = msg_desc.fields_by_name[name] + field_type_desc = msg_desc.nested_types_by_name[self.type_name] + test.assertEqual(name, field_desc.name) + expected_field_full_name = '.'.join([msg_desc.full_name, name]) + test.assertEqual(expected_field_full_name, field_desc.full_name) + test.assertEqual(index, field_desc.index) + test.assertEqual(self.number, field_desc.number) + test.assertEqual(descriptor.FieldDescriptor.TYPE_MESSAGE, field_desc.type) + test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_MESSAGE, + field_desc.cpp_type) + test.assertFalse(field_desc.has_default_value) + test.assertEqual(msg_desc, field_desc.containing_type) + test.assertEqual(field_type_desc, field_desc.message_type) + + +class StringField(object): + + def __init__(self, number, default_value): + self.number = number + self.default_value = default_value + + def CheckField(self, test, msg_desc, name, index): + field_desc = msg_desc.fields_by_name[name] + test.assertEqual(name, field_desc.name) + expected_field_full_name = '.'.join([msg_desc.full_name, name]) + test.assertEqual(expected_field_full_name, field_desc.full_name) + test.assertEqual(index, field_desc.index) + test.assertEqual(self.number, 
field_desc.number) + test.assertEqual(descriptor.FieldDescriptor.TYPE_STRING, field_desc.type) + test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_STRING, + field_desc.cpp_type) + test.assertTrue(field_desc.has_default_value) + test.assertEqual(self.default_value, field_desc.default_value) + + +class ExtensionField(object): + + def __init__(self, number, extended_type): + self.number = number + self.extended_type = extended_type + + def CheckField(self, test, msg_desc, name, index): + field_desc = msg_desc.extensions_by_name[name] + test.assertEqual(name, field_desc.name) + expected_field_full_name = '.'.join([msg_desc.full_name, name]) + test.assertEqual(expected_field_full_name, field_desc.full_name) + test.assertEqual(self.number, field_desc.number) + test.assertEqual(index, field_desc.index) + test.assertEqual(descriptor.FieldDescriptor.TYPE_MESSAGE, field_desc.type) + test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_MESSAGE, + field_desc.cpp_type) + test.assertFalse(field_desc.has_default_value) + test.assertTrue(field_desc.is_extension) + test.assertEqual(msg_desc, field_desc.extension_scope) + test.assertEqual(msg_desc, field_desc.message_type) + test.assertEqual(self.extended_type, field_desc.containing_type.name) + + +class AddDescriptorTest(unittest.TestCase): + + def _TestMessage(self, prefix): + pool = descriptor_pool.DescriptorPool() + pool.AddDescriptor(unittest_pb2.TestAllTypes.DESCRIPTOR) + self.assertEqual( + 'protobuf_unittest.TestAllTypes', + pool.FindMessageTypeByName( + prefix + 'protobuf_unittest.TestAllTypes').full_name) + + # AddDescriptor is not recursive. 
+ with self.assertRaises(KeyError): + pool.FindMessageTypeByName( + prefix + 'protobuf_unittest.TestAllTypes.NestedMessage') + + pool.AddDescriptor(unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR) + self.assertEqual( + 'protobuf_unittest.TestAllTypes.NestedMessage', + pool.FindMessageTypeByName( + prefix + 'protobuf_unittest.TestAllTypes.NestedMessage').full_name) + + # Files are implicitly also indexed when messages are added. + self.assertEqual( + 'google/protobuf/unittest.proto', + pool.FindFileByName( + 'google/protobuf/unittest.proto').name) + + self.assertEqual( + 'google/protobuf/unittest.proto', + pool.FindFileContainingSymbol( + prefix + 'protobuf_unittest.TestAllTypes.NestedMessage').name) + + @unittest.skipIf(api_implementation.Type() == 'cpp', + 'With the cpp implementation, Add() must be called first') + def testMessage(self): + self._TestMessage('') + self._TestMessage('.') + + def _TestEnum(self, prefix): + pool = descriptor_pool.DescriptorPool() + pool.AddEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR) + self.assertEqual( + 'protobuf_unittest.ForeignEnum', + pool.FindEnumTypeByName( + prefix + 'protobuf_unittest.ForeignEnum').full_name) + + # AddEnumDescriptor is not recursive. + with self.assertRaises(KeyError): + pool.FindEnumTypeByName( + prefix + 'protobuf_unittest.ForeignEnum.NestedEnum') + + pool.AddEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR) + self.assertEqual( + 'protobuf_unittest.TestAllTypes.NestedEnum', + pool.FindEnumTypeByName( + prefix + 'protobuf_unittest.TestAllTypes.NestedEnum').full_name) + + # Files are implicitly also indexed when enums are added. 
+ self.assertEqual( + 'google/protobuf/unittest.proto', + pool.FindFileByName( + 'google/protobuf/unittest.proto').name) + + self.assertEqual( + 'google/protobuf/unittest.proto', + pool.FindFileContainingSymbol( + prefix + 'protobuf_unittest.TestAllTypes.NestedEnum').name) + + @unittest.skipIf(api_implementation.Type() == 'cpp', + 'With the cpp implementation, Add() must be called first') + def testEnum(self): + self._TestEnum('') + self._TestEnum('.') + + @unittest.skipIf(api_implementation.Type() == 'cpp', + 'With the cpp implementation, Add() must be called first') + def testFile(self): + pool = descriptor_pool.DescriptorPool() + pool.AddFileDescriptor(unittest_pb2.DESCRIPTOR) + self.assertEqual( + 'google/protobuf/unittest.proto', + pool.FindFileByName( + 'google/protobuf/unittest.proto').name) + + # AddFileDescriptor is not recursive; messages and enums within files must + # be explicitly registered. + with self.assertRaises(KeyError): + pool.FindFileContainingSymbol( + 'protobuf_unittest.TestAllTypes') + + def _GetDescriptorPoolClass(self): + # Test with both implementations of descriptor pools. + if api_implementation.Type() == 'cpp': + # pylint: disable=g-import-not-at-top + from google.protobuf.pyext import _message + return _message.DescriptorPool + else: + return descriptor_pool.DescriptorPool + + def testEmptyDescriptorPool(self): + # Check that an empty DescriptorPool() contains no message. + pool = self._GetDescriptorPoolClass()() + proto_file_name = descriptor_pb2.DESCRIPTOR.name + self.assertRaises(KeyError, pool.FindFileByName, proto_file_name) + # Add the above file to the pool + file_descriptor = descriptor_pb2.FileDescriptorProto() + descriptor_pb2.DESCRIPTOR.CopyToProto(file_descriptor) + pool.Add(file_descriptor) + # Now it exists. + self.assertTrue(pool.FindFileByName(proto_file_name)) + + def testCustomDescriptorPool(self): + # Create a new pool, and add a file descriptor. 
+ pool = self._GetDescriptorPoolClass()() + file_desc = descriptor_pb2.FileDescriptorProto( + name='some/file.proto', package='package') + file_desc.message_type.add(name='Message') + pool.Add(file_desc) + self.assertEqual(pool.FindFileByName('some/file.proto').name, + 'some/file.proto') + self.assertEqual(pool.FindMessageTypeByName('package.Message').name, + 'Message') + + +@unittest.skipIf( + api_implementation.Type() != 'cpp', + 'default_pool is only supported by the C++ implementation') +class DefaultPoolTest(unittest.TestCase): + + def testFindMethods(self): + # pylint: disable=g-import-not-at-top + from google.protobuf.pyext import _message + pool = _message.default_pool + self.assertIs( + pool.FindFileByName('google/protobuf/unittest.proto'), + unittest_pb2.DESCRIPTOR) + self.assertIs( + pool.FindMessageTypeByName('protobuf_unittest.TestAllTypes'), + unittest_pb2.TestAllTypes.DESCRIPTOR) + self.assertIs( + pool.FindFieldByName('protobuf_unittest.TestAllTypes.optional_int32'), + unittest_pb2.TestAllTypes.DESCRIPTOR.fields_by_name['optional_int32']) + self.assertIs( + pool.FindExtensionByName('protobuf_unittest.optional_int32_extension'), + unittest_pb2.DESCRIPTOR.extensions_by_name['optional_int32_extension']) + self.assertIs( + pool.FindEnumTypeByName('protobuf_unittest.ForeignEnum'), + unittest_pb2.ForeignEnum.DESCRIPTOR) + self.assertIs( + pool.FindOneofByName('protobuf_unittest.TestAllTypes.oneof_field'), + unittest_pb2.TestAllTypes.DESCRIPTOR.oneofs_by_name['oneof_field']) + + def testAddFileDescriptor(self): + # pylint: disable=g-import-not-at-top + from google.protobuf.pyext import _message + pool = _message.default_pool + file_desc = descriptor_pb2.FileDescriptorProto(name='some/file.proto') + pool.Add(file_desc) + pool.AddSerializedFile(file_desc.SerializeToString()) + + +TEST1_FILE = ProtoFile( + 'google/protobuf/internal/descriptor_pool_test1.proto', + 'google.protobuf.python.internal', + { + 'DescriptorPoolTest1': MessageType({ + 'NestedEnum': 
EnumType([('ALPHA', 1), ('BETA', 2)]), + 'NestedMessage': MessageType({ + 'NestedEnum': EnumType([('EPSILON', 5), ('ZETA', 6)]), + 'DeepNestedMessage': MessageType({ + 'NestedEnum': EnumType([('ETA', 7), ('THETA', 8)]), + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'ETA')), + ('nested_field', StringField(2, 'theta')), + ]), + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'ZETA')), + ('nested_field', StringField(2, 'beta')), + ('deep_nested_message', MessageField(3, 'DeepNestedMessage')), + ]) + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'BETA')), + ('nested_message', MessageField(2, 'NestedMessage')), + ], is_extendable=True), + + 'DescriptorPoolTest2': MessageType({ + 'NestedEnum': EnumType([('GAMMA', 3), ('DELTA', 4)]), + 'NestedMessage': MessageType({ + 'NestedEnum': EnumType([('IOTA', 9), ('KAPPA', 10)]), + 'DeepNestedMessage': MessageType({ + 'NestedEnum': EnumType([('LAMBDA', 11), ('MU', 12)]), + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'MU')), + ('nested_field', StringField(2, 'lambda')), + ]), + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'IOTA')), + ('nested_field', StringField(2, 'delta')), + ('deep_nested_message', MessageField(3, 'DeepNestedMessage')), + ]) + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'GAMMA')), + ('nested_message', MessageField(2, 'NestedMessage')), + ]), + }) + + +TEST2_FILE = ProtoFile( + 'google/protobuf/internal/descriptor_pool_test2.proto', + 'google.protobuf.python.internal', + { + 'DescriptorPoolTest3': MessageType({ + 'NestedEnum': EnumType([('NU', 13), ('XI', 14)]), + 'NestedMessage': MessageType({ + 'NestedEnum': EnumType([('OMICRON', 15), ('PI', 16)]), + 'DeepNestedMessage': MessageType({ + 'NestedEnum': EnumType([('RHO', 17), ('SIGMA', 18)]), + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'RHO')), + ('nested_field', StringField(2, 'sigma')), + ]), + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'PI')), + ('nested_field', StringField(2, 'nu')), + ('deep_nested_message', 
MessageField(3, 'DeepNestedMessage')), + ]) + }, [ + ('nested_enum', EnumField(1, 'NestedEnum', 'XI')), + ('nested_message', MessageField(2, 'NestedMessage')), + ], extensions=[ + ('descriptor_pool_test', + ExtensionField(1001, 'DescriptorPoolTest1')), + ]), + }, + dependencies=['google/protobuf/internal/descriptor_pool_test1.proto']) + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/descriptor_pool_test1_pb2.py b/deps/google/protobuf/internal/descriptor_pool_test1_pb2.py new file mode 100644 index 00000000..f093d830 --- /dev/null +++ b/deps/google/protobuf/internal/descriptor_pool_test1_pb2.py @@ -0,0 +1,474 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/descriptor_pool_test1.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/descriptor_pool_test1.proto', + package='google.protobuf.python.internal', + syntax='proto2', + serialized_pb=_b('\n4google/protobuf/internal/descriptor_pool_test1.proto\x12\x1fgoogle.protobuf.python.internal\"\xfb\x05\n\x13\x44\x65scriptorPoolTest1\x12Z\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest1.NestedEnum:\x04\x42\x45TA\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage\x1a\xfd\x03\n\rNestedMessage\x12h\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.NestedEnum:\x04ZETA\x12\x1a\n\x0cnested_field\x18\x02 
\x01(\t:\x04\x62\x65ta\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12y\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.NestedEnum:\x03\x45TA\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05theta\" \n\nNestedEnum\x12\x07\n\x03\x45TA\x10\x07\x12\t\n\x05THETA\x10\x08\"#\n\nNestedEnum\x12\x0b\n\x07\x45PSILON\x10\x05\x12\x08\n\x04ZETA\x10\x06\"!\n\nNestedEnum\x12\t\n\x05\x41LPHA\x10\x01\x12\x08\n\x04\x42\x45TA\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xf1\x05\n\x13\x44\x65scriptorPoolTest2\x12[\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest2.NestedEnum:\x05GAMMA\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage\x1a\xfc\x03\n\rNestedMessage\x12h\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.NestedEnum:\x04IOTA\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05\x64\x65lta\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12x\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.NestedEnum:\x02MU\x12\x1c\n\x0cnested_field\x18\x02 \x01(\t:\x06lambda\" \n\nNestedEnum\x12\n\n\x06LAMBDA\x10\x0b\x12\x06\n\x02MU\x10\x0c\"!\n\nNestedEnum\x12\x08\n\x04IOTA\x10\t\x12\t\n\x05KAPPA\x10\n\"\"\n\nNestedEnum\x12\t\n\x05GAMMA\x10\x03\x12\t\n\x05\x44\x45LTA\x10\x04') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + 
full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ETA', index=0, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='THETA', index=1, number=8, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=738, + serialized_end=770, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM) + +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='EPSILON', index=0, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ZETA', index=1, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=772, + serialized_end=807, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM) + +_DESCRIPTORPOOLTEST1_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ALPHA', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BETA', index=1, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=809, + serialized_end=842, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDENUM) + +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.NestedEnum', + filename=None, + file=DESCRIPTOR, + 
values=[ + _descriptor.EnumValueDescriptor( + name='LAMBDA', index=0, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MU', index=1, number=12, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1506, + serialized_end=1538, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM) + +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='IOTA', index=0, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KAPPA', index=1, number=10, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1540, + serialized_end=1573, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM) + +_DESCRIPTORPOOLTEST2_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='GAMMA', index=0, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DELTA', index=1, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1575, + serialized_end=1609, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDENUM) + + +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor( + name='DeepNestedMessage', + full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_enum', 
full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=7, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.nested_field', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("theta").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=565, + serialized_end=770, +) + +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=6, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.nested_field', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("beta").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.deep_nested_message', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ], + enum_types=[ + _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=298, + serialized_end=807, +) + +_DESCRIPTORPOOLTEST1 = _descriptor.Descriptor( + name='DescriptorPoolTest1', + full_name='google.protobuf.python.internal.DescriptorPoolTest1', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest1.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest1.nested_message', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPOOLTEST1_NESTEDMESSAGE, ], + enum_types=[ + _DESCRIPTORPOOLTEST1_NESTEDENUM, + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=90, + serialized_end=853, +) + + +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor( + 
name='DeepNestedMessage', + full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=12, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.nested_field', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("lambda").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1333, + serialized_end=1538, +) + +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=9, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.nested_field', 
index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("delta").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.deep_nested_message', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ], + enum_types=[ + _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1065, + serialized_end=1573, +) + +_DESCRIPTORPOOLTEST2 = _descriptor.Descriptor( + name='DescriptorPoolTest2', + full_name='google.protobuf.python.internal.DescriptorPoolTest2', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=3, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest2.nested_message', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPOOLTEST2_NESTEDMESSAGE, ], + enum_types=[ + _DESCRIPTORPOOLTEST2_NESTEDENUM, + ], + 
options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=856, + serialized_end=1609, +) + +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST1 +_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE +_DESCRIPTORPOOLTEST1.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDENUM +_DESCRIPTORPOOLTEST1.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE +_DESCRIPTORPOOLTEST1_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1 +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.containing_type = 
_DESCRIPTORPOOLTEST2 +_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE +_DESCRIPTORPOOLTEST2.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDENUM +_DESCRIPTORPOOLTEST2.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE +_DESCRIPTORPOOLTEST2_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2 +DESCRIPTOR.message_types_by_name['DescriptorPoolTest1'] = _DESCRIPTORPOOLTEST1 +DESCRIPTOR.message_types_by_name['DescriptorPoolTest2'] = _DESCRIPTORPOOLTEST2 + +DescriptorPoolTest1 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest1', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + + DeepNestedMessage = _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), dict( + DESCRIPTOR = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE, + __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage) + )) + , + DESCRIPTOR = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE, + __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage) + )) + , + DESCRIPTOR = _DESCRIPTORPOOLTEST1, + __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1) + )) +_sym_db.RegisterMessage(DescriptorPoolTest1) +_sym_db.RegisterMessage(DescriptorPoolTest1.NestedMessage) +_sym_db.RegisterMessage(DescriptorPoolTest1.NestedMessage.DeepNestedMessage) + +DescriptorPoolTest2 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest2', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), 
dict( + + DeepNestedMessage = _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), dict( + DESCRIPTOR = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE, + __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage) + )) + , + DESCRIPTOR = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE, + __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage) + )) + , + DESCRIPTOR = _DESCRIPTORPOOLTEST2, + __module__ = 'google.protobuf.internal.descriptor_pool_test1_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2) + )) +_sym_db.RegisterMessage(DescriptorPoolTest2) +_sym_db.RegisterMessage(DescriptorPoolTest2.NestedMessage) +_sym_db.RegisterMessage(DescriptorPoolTest2.NestedMessage.DeepNestedMessage) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/descriptor_pool_test2_pb2.py b/deps/google/protobuf/internal/descriptor_pool_test2_pb2.py new file mode 100644 index 00000000..bcb8e482 --- /dev/null +++ b/deps/google/protobuf/internal/descriptor_pool_test2_pb2.py @@ -0,0 +1,263 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/descriptor_pool_test2.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal import descriptor_pool_test1_pb2 as google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/descriptor_pool_test2.proto', + package='google.protobuf.python.internal', + syntax='proto2', + serialized_pb=_b('\n4google/protobuf/internal/descriptor_pool_test2.proto\x12\x1fgoogle.protobuf.python.internal\x1a\x34google/protobuf/internal/descriptor_pool_test1.proto\"\xef\x06\n\x13\x44\x65scriptorPoolTest3\x12X\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest3.NestedEnum:\x02XI\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage\x1a\xf7\x03\n\rNestedMessage\x12\x66\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.NestedEnum:\x02PI\x12\x18\n\x0cnested_field\x18\x02 \x01(\t:\x02nu\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12y\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.NestedEnum:\x03RHO\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05sigma\" 
\n\nNestedEnum\x12\x07\n\x03RHO\x10\x11\x12\t\n\x05SIGMA\x10\x12\"!\n\nNestedEnum\x12\x0b\n\x07OMICRON\x10\x0f\x12\x06\n\x02PI\x10\x10\"\x1c\n\nNestedEnum\x12\x06\n\x02NU\x10\r\x12\x06\n\x02XI\x10\x0e\x32\x89\x01\n\x14\x64\x65scriptor_pool_test\x12\x34.google.protobuf.python.internal.DescriptorPoolTest1\x18\xe9\x07 \x01(\x0b\x32\x34.google.protobuf.python.internal.DescriptorPoolTest3') + , + dependencies=[google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='RHO', index=0, number=17, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SIGMA', index=1, number=18, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=786, + serialized_end=818, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM) + +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='OMICRON', index=0, number=15, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PI', index=1, number=16, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=820, + serialized_end=853, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM) + +_DESCRIPTORPOOLTEST3_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedEnum', + filename=None, + 
file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NU', index=0, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='XI', index=1, number=14, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=855, + serialized_end=883, +) +_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDENUM) + + +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor( + name='DeepNestedMessage', + full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=17, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.nested_field', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("sigma").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=613, + serialized_end=818, +) + +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=16, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.nested_field', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("nu").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.deep_nested_message', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ], + enum_types=[ + _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=350, + serialized_end=853, +) + +_DESCRIPTORPOOLTEST3 = _descriptor.Descriptor( + name='DescriptorPoolTest3', + full_name='google.protobuf.python.internal.DescriptorPoolTest3', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=14, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest3.nested_message', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='descriptor_pool_test', full_name='google.protobuf.python.internal.DescriptorPoolTest3.descriptor_pool_test', index=0, + number=1001, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[_DESCRIPTORPOOLTEST3_NESTEDMESSAGE, ], + enum_types=[ + _DESCRIPTORPOOLTEST3_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=144, + serialized_end=1023, +) + +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST3 +_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE +_DESCRIPTORPOOLTEST3.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDENUM +_DESCRIPTORPOOLTEST3.fields_by_name['nested_message'].message_type = 
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE +_DESCRIPTORPOOLTEST3_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3 +DESCRIPTOR.message_types_by_name['DescriptorPoolTest3'] = _DESCRIPTORPOOLTEST3 + +DescriptorPoolTest3 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest3', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + + DeepNestedMessage = _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), dict( + DESCRIPTOR = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE, + __module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage) + )) + , + DESCRIPTOR = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE, + __module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage) + )) + , + DESCRIPTOR = _DESCRIPTORPOOLTEST3, + __module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3) + )) +_sym_db.RegisterMessage(DescriptorPoolTest3) +_sym_db.RegisterMessage(DescriptorPoolTest3.NestedMessage) +_sym_db.RegisterMessage(DescriptorPoolTest3.NestedMessage.DeepNestedMessage) + +_DESCRIPTORPOOLTEST3.extensions_by_name['descriptor_pool_test'].message_type = _DESCRIPTORPOOLTEST3 +google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2.DescriptorPoolTest1.RegisterExtension(_DESCRIPTORPOOLTEST3.extensions_by_name['descriptor_pool_test']) + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/descriptor_test.py b/deps/google/protobuf/internal/descriptor_test.py new file mode 100644 index 00000000..fee09a56 --- /dev/null +++ b/deps/google/protobuf/internal/descriptor_test.py @@ -0,0 +1,821 @@ +#! 
/usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Unittest for google.protobuf.internal.descriptor.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import sys + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf import unittest_custom_options_pb2 +from google.protobuf import unittest_import_pb2 +from google.protobuf import unittest_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf.internal import api_implementation +from google.protobuf.internal import test_util +from google.protobuf import descriptor +from google.protobuf import descriptor_pool +from google.protobuf import symbol_database +from google.protobuf import text_format + + +TEST_EMPTY_MESSAGE_DESCRIPTOR_ASCII = """ +name: 'TestEmptyMessage' +""" + + +class DescriptorTest(unittest.TestCase): + + def setUp(self): + file_proto = descriptor_pb2.FileDescriptorProto( + name='some/filename/some.proto', + package='protobuf_unittest') + message_proto = file_proto.message_type.add( + name='NestedMessage') + message_proto.field.add( + name='bb', + number=1, + type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32, + label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL) + enum_proto = message_proto.enum_type.add( + name='ForeignEnum') + enum_proto.value.add(name='FOREIGN_FOO', number=4) + enum_proto.value.add(name='FOREIGN_BAR', number=5) + enum_proto.value.add(name='FOREIGN_BAZ', number=6) + + self.pool = self.GetDescriptorPool() + self.pool.Add(file_proto) + self.my_file = self.pool.FindFileByName(file_proto.name) + self.my_message = self.my_file.message_types_by_name[message_proto.name] + self.my_enum = self.my_message.enum_types_by_name[enum_proto.name] + + self.my_method = descriptor.MethodDescriptor( + name='Bar', + full_name='protobuf_unittest.TestService.Bar', + index=0, + containing_service=None, + input_type=None, + output_type=None) + self.my_service = descriptor.ServiceDescriptor( + name='TestServiceWithOptions', + full_name='protobuf_unittest.TestServiceWithOptions', 
+ file=self.my_file, + index=0, + methods=[ + self.my_method + ]) + + def GetDescriptorPool(self): + return symbol_database.Default().pool + + def testEnumValueName(self): + self.assertEqual(self.my_message.EnumValueName('ForeignEnum', 4), + 'FOREIGN_FOO') + + self.assertEqual( + self.my_message.enum_types_by_name[ + 'ForeignEnum'].values_by_number[4].name, + self.my_message.EnumValueName('ForeignEnum', 4)) + + def testEnumFixups(self): + self.assertEqual(self.my_enum, self.my_enum.values[0].type) + + def testContainingTypeFixups(self): + self.assertEqual(self.my_message, self.my_message.fields[0].containing_type) + self.assertEqual(self.my_message, self.my_enum.containing_type) + + def testContainingServiceFixups(self): + self.assertEqual(self.my_service, self.my_method.containing_service) + + def testGetOptions(self): + self.assertEqual(self.my_enum.GetOptions(), + descriptor_pb2.EnumOptions()) + self.assertEqual(self.my_enum.values[0].GetOptions(), + descriptor_pb2.EnumValueOptions()) + self.assertEqual(self.my_message.GetOptions(), + descriptor_pb2.MessageOptions()) + self.assertEqual(self.my_message.fields[0].GetOptions(), + descriptor_pb2.FieldOptions()) + self.assertEqual(self.my_method.GetOptions(), + descriptor_pb2.MethodOptions()) + self.assertEqual(self.my_service.GetOptions(), + descriptor_pb2.ServiceOptions()) + + def testSimpleCustomOptions(self): + file_descriptor = unittest_custom_options_pb2.DESCRIPTOR + message_descriptor =\ + unittest_custom_options_pb2.TestMessageWithCustomOptions.DESCRIPTOR + field_descriptor = message_descriptor.fields_by_name["field1"] + enum_descriptor = message_descriptor.enum_types_by_name["AnEnum"] + enum_value_descriptor =\ + message_descriptor.enum_values_by_name["ANENUM_VAL2"] + service_descriptor =\ + unittest_custom_options_pb2.TestServiceWithCustomOptions.DESCRIPTOR + method_descriptor = service_descriptor.FindMethodByName("Foo") + + file_options = file_descriptor.GetOptions() + file_opt1 = 
unittest_custom_options_pb2.file_opt1 + self.assertEqual(9876543210, file_options.Extensions[file_opt1]) + message_options = message_descriptor.GetOptions() + message_opt1 = unittest_custom_options_pb2.message_opt1 + self.assertEqual(-56, message_options.Extensions[message_opt1]) + field_options = field_descriptor.GetOptions() + field_opt1 = unittest_custom_options_pb2.field_opt1 + self.assertEqual(8765432109, field_options.Extensions[field_opt1]) + field_opt2 = unittest_custom_options_pb2.field_opt2 + self.assertEqual(42, field_options.Extensions[field_opt2]) + enum_options = enum_descriptor.GetOptions() + enum_opt1 = unittest_custom_options_pb2.enum_opt1 + self.assertEqual(-789, enum_options.Extensions[enum_opt1]) + enum_value_options = enum_value_descriptor.GetOptions() + enum_value_opt1 = unittest_custom_options_pb2.enum_value_opt1 + self.assertEqual(123, enum_value_options.Extensions[enum_value_opt1]) + + service_options = service_descriptor.GetOptions() + service_opt1 = unittest_custom_options_pb2.service_opt1 + self.assertEqual(-9876543210, service_options.Extensions[service_opt1]) + method_options = method_descriptor.GetOptions() + method_opt1 = unittest_custom_options_pb2.method_opt1 + self.assertEqual(unittest_custom_options_pb2.METHODOPT1_VAL2, + method_options.Extensions[method_opt1]) + + message_descriptor = ( + unittest_custom_options_pb2.DummyMessageContainingEnum.DESCRIPTOR) + self.assertTrue(file_descriptor.has_options) + self.assertFalse(message_descriptor.has_options) + + def testDifferentCustomOptionTypes(self): + kint32min = -2**31 + kint64min = -2**63 + kint32max = 2**31 - 1 + kint64max = 2**63 - 1 + kuint32max = 2**32 - 1 + kuint64max = 2**64 - 1 + + message_descriptor =\ + unittest_custom_options_pb2.CustomOptionMinIntegerValues.DESCRIPTOR + message_options = message_descriptor.GetOptions() + self.assertEqual(False, message_options.Extensions[ + unittest_custom_options_pb2.bool_opt]) + self.assertEqual(kint32min, message_options.Extensions[ 
+ unittest_custom_options_pb2.int32_opt]) + self.assertEqual(kint64min, message_options.Extensions[ + unittest_custom_options_pb2.int64_opt]) + self.assertEqual(0, message_options.Extensions[ + unittest_custom_options_pb2.uint32_opt]) + self.assertEqual(0, message_options.Extensions[ + unittest_custom_options_pb2.uint64_opt]) + self.assertEqual(kint32min, message_options.Extensions[ + unittest_custom_options_pb2.sint32_opt]) + self.assertEqual(kint64min, message_options.Extensions[ + unittest_custom_options_pb2.sint64_opt]) + self.assertEqual(0, message_options.Extensions[ + unittest_custom_options_pb2.fixed32_opt]) + self.assertEqual(0, message_options.Extensions[ + unittest_custom_options_pb2.fixed64_opt]) + self.assertEqual(kint32min, message_options.Extensions[ + unittest_custom_options_pb2.sfixed32_opt]) + self.assertEqual(kint64min, message_options.Extensions[ + unittest_custom_options_pb2.sfixed64_opt]) + + message_descriptor =\ + unittest_custom_options_pb2.CustomOptionMaxIntegerValues.DESCRIPTOR + message_options = message_descriptor.GetOptions() + self.assertEqual(True, message_options.Extensions[ + unittest_custom_options_pb2.bool_opt]) + self.assertEqual(kint32max, message_options.Extensions[ + unittest_custom_options_pb2.int32_opt]) + self.assertEqual(kint64max, message_options.Extensions[ + unittest_custom_options_pb2.int64_opt]) + self.assertEqual(kuint32max, message_options.Extensions[ + unittest_custom_options_pb2.uint32_opt]) + self.assertEqual(kuint64max, message_options.Extensions[ + unittest_custom_options_pb2.uint64_opt]) + self.assertEqual(kint32max, message_options.Extensions[ + unittest_custom_options_pb2.sint32_opt]) + self.assertEqual(kint64max, message_options.Extensions[ + unittest_custom_options_pb2.sint64_opt]) + self.assertEqual(kuint32max, message_options.Extensions[ + unittest_custom_options_pb2.fixed32_opt]) + self.assertEqual(kuint64max, message_options.Extensions[ + unittest_custom_options_pb2.fixed64_opt]) + 
self.assertEqual(kint32max, message_options.Extensions[ + unittest_custom_options_pb2.sfixed32_opt]) + self.assertEqual(kint64max, message_options.Extensions[ + unittest_custom_options_pb2.sfixed64_opt]) + + message_descriptor =\ + unittest_custom_options_pb2.CustomOptionOtherValues.DESCRIPTOR + message_options = message_descriptor.GetOptions() + self.assertEqual(-100, message_options.Extensions[ + unittest_custom_options_pb2.int32_opt]) + self.assertAlmostEqual(12.3456789, message_options.Extensions[ + unittest_custom_options_pb2.float_opt], 6) + self.assertAlmostEqual(1.234567890123456789, message_options.Extensions[ + unittest_custom_options_pb2.double_opt]) + self.assertEqual("Hello, \"World\"", message_options.Extensions[ + unittest_custom_options_pb2.string_opt]) + self.assertEqual(b"Hello\0World", message_options.Extensions[ + unittest_custom_options_pb2.bytes_opt]) + dummy_enum = unittest_custom_options_pb2.DummyMessageContainingEnum + self.assertEqual( + dummy_enum.TEST_OPTION_ENUM_TYPE2, + message_options.Extensions[unittest_custom_options_pb2.enum_opt]) + + message_descriptor =\ + unittest_custom_options_pb2.SettingRealsFromPositiveInts.DESCRIPTOR + message_options = message_descriptor.GetOptions() + self.assertAlmostEqual(12, message_options.Extensions[ + unittest_custom_options_pb2.float_opt], 6) + self.assertAlmostEqual(154, message_options.Extensions[ + unittest_custom_options_pb2.double_opt]) + + message_descriptor =\ + unittest_custom_options_pb2.SettingRealsFromNegativeInts.DESCRIPTOR + message_options = message_descriptor.GetOptions() + self.assertAlmostEqual(-12, message_options.Extensions[ + unittest_custom_options_pb2.float_opt], 6) + self.assertAlmostEqual(-154, message_options.Extensions[ + unittest_custom_options_pb2.double_opt]) + + def testComplexExtensionOptions(self): + descriptor =\ + unittest_custom_options_pb2.VariousComplexOptions.DESCRIPTOR + options = descriptor.GetOptions() + self.assertEqual(42, options.Extensions[ + 
unittest_custom_options_pb2.complex_opt1].foo) + self.assertEqual(324, options.Extensions[ + unittest_custom_options_pb2.complex_opt1].Extensions[ + unittest_custom_options_pb2.quux]) + self.assertEqual(876, options.Extensions[ + unittest_custom_options_pb2.complex_opt1].Extensions[ + unittest_custom_options_pb2.corge].qux) + self.assertEqual(987, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].baz) + self.assertEqual(654, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].Extensions[ + unittest_custom_options_pb2.grault]) + self.assertEqual(743, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].bar.foo) + self.assertEqual(1999, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].bar.Extensions[ + unittest_custom_options_pb2.quux]) + self.assertEqual(2008, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].bar.Extensions[ + unittest_custom_options_pb2.corge].qux) + self.assertEqual(741, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].Extensions[ + unittest_custom_options_pb2.garply].foo) + self.assertEqual(1998, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].Extensions[ + unittest_custom_options_pb2.garply].Extensions[ + unittest_custom_options_pb2.quux]) + self.assertEqual(2121, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].Extensions[ + unittest_custom_options_pb2.garply].Extensions[ + unittest_custom_options_pb2.corge].qux) + self.assertEqual(1971, options.Extensions[ + unittest_custom_options_pb2.ComplexOptionType2 + .ComplexOptionType4.complex_opt4].waldo) + self.assertEqual(321, options.Extensions[ + unittest_custom_options_pb2.complex_opt2].fred.waldo) + self.assertEqual(9, options.Extensions[ + unittest_custom_options_pb2.complex_opt3].qux) + self.assertEqual(22, options.Extensions[ + unittest_custom_options_pb2.complex_opt3].complexoptiontype5.plugh) + self.assertEqual(24, options.Extensions[ + 
unittest_custom_options_pb2.complexopt6].xyzzy) + + # Check that aggregate options were parsed and saved correctly in + # the appropriate descriptors. + def testAggregateOptions(self): + file_descriptor = unittest_custom_options_pb2.DESCRIPTOR + message_descriptor =\ + unittest_custom_options_pb2.AggregateMessage.DESCRIPTOR + field_descriptor = message_descriptor.fields_by_name["fieldname"] + enum_descriptor = unittest_custom_options_pb2.AggregateEnum.DESCRIPTOR + enum_value_descriptor = enum_descriptor.values_by_name["VALUE"] + service_descriptor =\ + unittest_custom_options_pb2.AggregateService.DESCRIPTOR + method_descriptor = service_descriptor.FindMethodByName("Method") + + # Tests for the different types of data embedded in fileopt + file_options = file_descriptor.GetOptions().Extensions[ + unittest_custom_options_pb2.fileopt] + self.assertEqual(100, file_options.i) + self.assertEqual("FileAnnotation", file_options.s) + self.assertEqual("NestedFileAnnotation", file_options.sub.s) + self.assertEqual("FileExtensionAnnotation", file_options.file.Extensions[ + unittest_custom_options_pb2.fileopt].s) + self.assertEqual("EmbeddedMessageSetElement", file_options.mset.Extensions[ + unittest_custom_options_pb2.AggregateMessageSetElement + .message_set_extension].s) + + # Simple tests for all the other types of annotations + self.assertEqual( + "MessageAnnotation", + message_descriptor.GetOptions().Extensions[ + unittest_custom_options_pb2.msgopt].s) + self.assertEqual( + "FieldAnnotation", + field_descriptor.GetOptions().Extensions[ + unittest_custom_options_pb2.fieldopt].s) + self.assertEqual( + "EnumAnnotation", + enum_descriptor.GetOptions().Extensions[ + unittest_custom_options_pb2.enumopt].s) + self.assertEqual( + "EnumValueAnnotation", + enum_value_descriptor.GetOptions().Extensions[ + unittest_custom_options_pb2.enumvalopt].s) + self.assertEqual( + "ServiceAnnotation", + service_descriptor.GetOptions().Extensions[ + unittest_custom_options_pb2.serviceopt].s) + 
self.assertEqual( + "MethodAnnotation", + method_descriptor.GetOptions().Extensions[ + unittest_custom_options_pb2.methodopt].s) + + def testNestedOptions(self): + nested_message =\ + unittest_custom_options_pb2.NestedOptionType.NestedMessage.DESCRIPTOR + self.assertEqual(1001, nested_message.GetOptions().Extensions[ + unittest_custom_options_pb2.message_opt1]) + nested_field = nested_message.fields_by_name["nested_field"] + self.assertEqual(1002, nested_field.GetOptions().Extensions[ + unittest_custom_options_pb2.field_opt1]) + outer_message =\ + unittest_custom_options_pb2.NestedOptionType.DESCRIPTOR + nested_enum = outer_message.enum_types_by_name["NestedEnum"] + self.assertEqual(1003, nested_enum.GetOptions().Extensions[ + unittest_custom_options_pb2.enum_opt1]) + nested_enum_value = outer_message.enum_values_by_name["NESTED_ENUM_VALUE"] + self.assertEqual(1004, nested_enum_value.GetOptions().Extensions[ + unittest_custom_options_pb2.enum_value_opt1]) + nested_extension = outer_message.extensions_by_name["nested_extension"] + self.assertEqual(1005, nested_extension.GetOptions().Extensions[ + unittest_custom_options_pb2.field_opt2]) + + def testFileDescriptorReferences(self): + self.assertEqual(self.my_enum.file, self.my_file) + self.assertEqual(self.my_message.file, self.my_file) + + def testFileDescriptor(self): + self.assertEqual(self.my_file.name, 'some/filename/some.proto') + self.assertEqual(self.my_file.package, 'protobuf_unittest') + self.assertEqual(self.my_file.pool, self.pool) + # Generated modules also belong to the default pool. 
+ self.assertEqual(unittest_pb2.DESCRIPTOR.pool, descriptor_pool.Default()) + + @unittest.skipIf( + api_implementation.Type() != 'cpp' or api_implementation.Version() != 2, + 'Immutability of descriptors is only enforced in v2 implementation') + def testImmutableCppDescriptor(self): + message_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR + with self.assertRaises(AttributeError): + message_descriptor.fields_by_name = None + with self.assertRaises(TypeError): + message_descriptor.fields_by_name['Another'] = None + with self.assertRaises(TypeError): + message_descriptor.fields.append(None) + + +class NewDescriptorTest(DescriptorTest): + """Redo the same tests as above, but with a separate DescriptorPool.""" + + def GetDescriptorPool(self): + return descriptor_pool.DescriptorPool() + + +class GeneratedDescriptorTest(unittest.TestCase): + """Tests for the properties of descriptors in generated code.""" + + def CheckMessageDescriptor(self, message_descriptor): + # Basic properties + self.assertEqual(message_descriptor.name, 'TestAllTypes') + self.assertEqual(message_descriptor.full_name, + 'protobuf_unittest.TestAllTypes') + # Test equality and hashability + self.assertEqual(message_descriptor, message_descriptor) + self.assertEqual(message_descriptor.fields[0].containing_type, + message_descriptor) + self.assertIn(message_descriptor, [message_descriptor]) + self.assertIn(message_descriptor, {message_descriptor: None}) + # Test field containers + self.CheckDescriptorSequence(message_descriptor.fields) + self.CheckDescriptorMapping(message_descriptor.fields_by_name) + self.CheckDescriptorMapping(message_descriptor.fields_by_number) + self.CheckDescriptorMapping(message_descriptor.fields_by_camelcase_name) + + def CheckFieldDescriptor(self, field_descriptor): + # Basic properties + self.assertEqual(field_descriptor.name, 'optional_int32') + self.assertEqual(field_descriptor.camelcase_name, 'optionalInt32') + self.assertEqual(field_descriptor.full_name, + 
'protobuf_unittest.TestAllTypes.optional_int32') + self.assertEqual(field_descriptor.containing_type.name, 'TestAllTypes') + # Test equality and hashability + self.assertEqual(field_descriptor, field_descriptor) + self.assertEqual( + field_descriptor.containing_type.fields_by_name['optional_int32'], + field_descriptor) + self.assertEqual( + field_descriptor.containing_type.fields_by_camelcase_name[ + 'optionalInt32'], + field_descriptor) + self.assertIn(field_descriptor, [field_descriptor]) + self.assertIn(field_descriptor, {field_descriptor: None}) + + def CheckDescriptorSequence(self, sequence): + # Verifies that a property like 'messageDescriptor.fields' has all the + # properties of an immutable abc.Sequence. + self.assertGreater(len(sequence), 0) # Sized + self.assertEqual(len(sequence), len(list(sequence))) # Iterable + item = sequence[0] + self.assertEqual(item, sequence[0]) + self.assertIn(item, sequence) # Container + self.assertEqual(sequence.index(item), 0) + self.assertEqual(sequence.count(item), 1) + reversed_iterator = reversed(sequence) + self.assertEqual(list(reversed_iterator), list(sequence)[::-1]) + self.assertRaises(StopIteration, next, reversed_iterator) + + def CheckDescriptorMapping(self, mapping): + # Verifies that a property like 'messageDescriptor.fields' has all the + # properties of an immutable abc.Mapping. 
+ self.assertGreater(len(mapping), 0) # Sized + self.assertEqual(len(mapping), len(list(mapping))) # Iterable + if sys.version_info >= (3,): + key, item = next(iter(mapping.items())) + else: + key, item = mapping.items()[0] + self.assertIn(key, mapping) # Container + self.assertEqual(mapping.get(key), item) + # keys(), iterkeys() &co + item = (next(iter(mapping.keys())), next(iter(mapping.values()))) + self.assertEqual(item, next(iter(mapping.items()))) + if sys.version_info < (3,): + def CheckItems(seq, iterator): + self.assertEqual(next(iterator), seq[0]) + self.assertEqual(list(iterator), seq[1:]) + CheckItems(mapping.keys(), mapping.iterkeys()) + CheckItems(mapping.values(), mapping.itervalues()) + CheckItems(mapping.items(), mapping.iteritems()) + + def testDescriptor(self): + message_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR + self.CheckMessageDescriptor(message_descriptor) + field_descriptor = message_descriptor.fields_by_name['optional_int32'] + self.CheckFieldDescriptor(field_descriptor) + field_descriptor = message_descriptor.fields_by_camelcase_name[ + 'optionalInt32'] + self.CheckFieldDescriptor(field_descriptor) + + def testCppDescriptorContainer(self): + # Check that the collection is still valid even if the parent disappeared. 
+ enum = unittest_pb2.TestAllTypes.DESCRIPTOR.enum_types_by_name['NestedEnum'] + values = enum.values + del enum + self.assertEqual('FOO', values[0].name) + + def testCppDescriptorContainer_Iterator(self): + # Same test with the iterator + enum = unittest_pb2.TestAllTypes.DESCRIPTOR.enum_types_by_name['NestedEnum'] + values_iter = iter(enum.values) + del enum + self.assertEqual('FOO', next(values_iter).name) + + +class DescriptorCopyToProtoTest(unittest.TestCase): + """Tests for CopyTo functions of Descriptor.""" + + def _AssertProtoEqual(self, actual_proto, expected_class, expected_ascii): + expected_proto = expected_class() + text_format.Merge(expected_ascii, expected_proto) + + self.assertEqual( + actual_proto, expected_proto, + 'Not equal,\nActual:\n%s\nExpected:\n%s\n' + % (str(actual_proto), str(expected_proto))) + + def _InternalTestCopyToProto(self, desc, expected_proto_class, + expected_proto_ascii): + actual = expected_proto_class() + desc.CopyToProto(actual) + self._AssertProtoEqual( + actual, expected_proto_class, expected_proto_ascii) + + def testCopyToProto_EmptyMessage(self): + self._InternalTestCopyToProto( + unittest_pb2.TestEmptyMessage.DESCRIPTOR, + descriptor_pb2.DescriptorProto, + TEST_EMPTY_MESSAGE_DESCRIPTOR_ASCII) + + def testCopyToProto_NestedMessage(self): + TEST_NESTED_MESSAGE_ASCII = """ + name: 'NestedMessage' + field: < + name: 'bb' + number: 1 + label: 1 # Optional + type: 5 # TYPE_INT32 + > + """ + + self._InternalTestCopyToProto( + unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR, + descriptor_pb2.DescriptorProto, + TEST_NESTED_MESSAGE_ASCII) + + def testCopyToProto_ForeignNestedMessage(self): + TEST_FOREIGN_NESTED_ASCII = """ + name: 'TestForeignNested' + field: < + name: 'foreign_nested' + number: 1 + label: 1 # Optional + type: 11 # TYPE_MESSAGE + type_name: '.protobuf_unittest.TestAllTypes.NestedMessage' + > + """ + + self._InternalTestCopyToProto( + unittest_pb2.TestForeignNested.DESCRIPTOR, + descriptor_pb2.DescriptorProto, 
+ TEST_FOREIGN_NESTED_ASCII) + + def testCopyToProto_ForeignEnum(self): + TEST_FOREIGN_ENUM_ASCII = """ + name: 'ForeignEnum' + value: < + name: 'FOREIGN_FOO' + number: 4 + > + value: < + name: 'FOREIGN_BAR' + number: 5 + > + value: < + name: 'FOREIGN_BAZ' + number: 6 + > + """ + + self._InternalTestCopyToProto( + unittest_pb2._FOREIGNENUM, + descriptor_pb2.EnumDescriptorProto, + TEST_FOREIGN_ENUM_ASCII) + + def testCopyToProto_Options(self): + TEST_DEPRECATED_FIELDS_ASCII = """ + name: 'TestDeprecatedFields' + field: < + name: 'deprecated_int32' + number: 1 + label: 1 # Optional + type: 5 # TYPE_INT32 + options: < + deprecated: true + > + > + """ + + self._InternalTestCopyToProto( + unittest_pb2.TestDeprecatedFields.DESCRIPTOR, + descriptor_pb2.DescriptorProto, + TEST_DEPRECATED_FIELDS_ASCII) + + def testCopyToProto_AllExtensions(self): + TEST_EMPTY_MESSAGE_WITH_EXTENSIONS_ASCII = """ + name: 'TestEmptyMessageWithExtensions' + extension_range: < + start: 1 + end: 536870912 + > + """ + + self._InternalTestCopyToProto( + unittest_pb2.TestEmptyMessageWithExtensions.DESCRIPTOR, + descriptor_pb2.DescriptorProto, + TEST_EMPTY_MESSAGE_WITH_EXTENSIONS_ASCII) + + def testCopyToProto_SeveralExtensions(self): + TEST_MESSAGE_WITH_SEVERAL_EXTENSIONS_ASCII = """ + name: 'TestMultipleExtensionRanges' + extension_range: < + start: 42 + end: 43 + > + extension_range: < + start: 4143 + end: 4244 + > + extension_range: < + start: 65536 + end: 536870912 + > + """ + + self._InternalTestCopyToProto( + unittest_pb2.TestMultipleExtensionRanges.DESCRIPTOR, + descriptor_pb2.DescriptorProto, + TEST_MESSAGE_WITH_SEVERAL_EXTENSIONS_ASCII) + + # Disable this test so we can make changes to the proto file. + # TODO(xiaofeng): Enable this test after cl/55530659 is submitted. 
+ # + # def testCopyToProto_FileDescriptor(self): + # UNITTEST_IMPORT_FILE_DESCRIPTOR_ASCII = (""" + # name: 'google/protobuf/unittest_import.proto' + # package: 'protobuf_unittest_import' + # dependency: 'google/protobuf/unittest_import_public.proto' + # message_type: < + # name: 'ImportMessage' + # field: < + # name: 'd' + # number: 1 + # label: 1 # Optional + # type: 5 # TYPE_INT32 + # > + # > + # """ + + # """enum_type: < + # name: 'ImportEnum' + # value: < + # name: 'IMPORT_FOO' + # number: 7 + # > + # value: < + # name: 'IMPORT_BAR' + # number: 8 + # > + # value: < + # name: 'IMPORT_BAZ' + # number: 9 + # > + # > + # options: < + # java_package: 'com.google.protobuf.test' + # optimize_for: 1 # SPEED + # > + # public_dependency: 0 + # """) + # self._InternalTestCopyToProto( + # unittest_import_pb2.DESCRIPTOR, + # descriptor_pb2.FileDescriptorProto, + # UNITTEST_IMPORT_FILE_DESCRIPTOR_ASCII) + + def testCopyToProto_ServiceDescriptor(self): + TEST_SERVICE_ASCII = """ + name: 'TestService' + method: < + name: 'Foo' + input_type: '.protobuf_unittest.FooRequest' + output_type: '.protobuf_unittest.FooResponse' + > + method: < + name: 'Bar' + input_type: '.protobuf_unittest.BarRequest' + output_type: '.protobuf_unittest.BarResponse' + > + """ + # TODO(rocking): enable this test after the proto descriptor change is + # checked in. 
+ #self._InternalTestCopyToProto( + # unittest_pb2.TestService.DESCRIPTOR, + # descriptor_pb2.ServiceDescriptorProto, + # TEST_SERVICE_ASCII) + + +class MakeDescriptorTest(unittest.TestCase): + + def testMakeDescriptorWithNestedFields(self): + file_descriptor_proto = descriptor_pb2.FileDescriptorProto() + file_descriptor_proto.name = 'Foo2' + message_type = file_descriptor_proto.message_type.add() + message_type.name = file_descriptor_proto.name + nested_type = message_type.nested_type.add() + nested_type.name = 'Sub' + enum_type = nested_type.enum_type.add() + enum_type.name = 'FOO' + enum_type_val = enum_type.value.add() + enum_type_val.name = 'BAR' + enum_type_val.number = 3 + field = message_type.field.add() + field.number = 1 + field.name = 'uint64_field' + field.label = descriptor.FieldDescriptor.LABEL_REQUIRED + field.type = descriptor.FieldDescriptor.TYPE_UINT64 + field = message_type.field.add() + field.number = 2 + field.name = 'nested_message_field' + field.label = descriptor.FieldDescriptor.LABEL_REQUIRED + field.type = descriptor.FieldDescriptor.TYPE_MESSAGE + field.type_name = 'Sub' + enum_field = nested_type.field.add() + enum_field.number = 2 + enum_field.name = 'bar_field' + enum_field.label = descriptor.FieldDescriptor.LABEL_REQUIRED + enum_field.type = descriptor.FieldDescriptor.TYPE_ENUM + enum_field.type_name = 'Foo2.Sub.FOO' + + result = descriptor.MakeDescriptor(message_type) + self.assertEqual(result.fields[0].cpp_type, + descriptor.FieldDescriptor.CPPTYPE_UINT64) + self.assertEqual(result.fields[1].cpp_type, + descriptor.FieldDescriptor.CPPTYPE_MESSAGE) + self.assertEqual(result.fields[1].message_type.containing_type, + result) + self.assertEqual(result.nested_types[0].fields[0].full_name, + 'Foo2.Sub.bar_field') + self.assertEqual(result.nested_types[0].fields[0].enum_type, + result.nested_types[0].enum_types[0]) + + def testMakeDescriptorWithUnsignedIntField(self): + file_descriptor_proto = descriptor_pb2.FileDescriptorProto() + 
file_descriptor_proto.name = 'Foo' + message_type = file_descriptor_proto.message_type.add() + message_type.name = file_descriptor_proto.name + enum_type = message_type.enum_type.add() + enum_type.name = 'FOO' + enum_type_val = enum_type.value.add() + enum_type_val.name = 'BAR' + enum_type_val.number = 3 + field = message_type.field.add() + field.number = 1 + field.name = 'uint64_field' + field.label = descriptor.FieldDescriptor.LABEL_REQUIRED + field.type = descriptor.FieldDescriptor.TYPE_UINT64 + enum_field = message_type.field.add() + enum_field.number = 2 + enum_field.name = 'bar_field' + enum_field.label = descriptor.FieldDescriptor.LABEL_REQUIRED + enum_field.type = descriptor.FieldDescriptor.TYPE_ENUM + enum_field.type_name = 'Foo.FOO' + + result = descriptor.MakeDescriptor(message_type) + self.assertEqual(result.fields[0].cpp_type, + descriptor.FieldDescriptor.CPPTYPE_UINT64) + + + def testMakeDescriptorWithOptions(self): + descriptor_proto = descriptor_pb2.DescriptorProto() + aggregate_message = unittest_custom_options_pb2.AggregateMessage + aggregate_message.DESCRIPTOR.CopyToProto(descriptor_proto) + reformed_descriptor = descriptor.MakeDescriptor(descriptor_proto) + + options = reformed_descriptor.GetOptions() + self.assertEqual(101, + options.Extensions[unittest_custom_options_pb2.msgopt].i) + + def testCamelcaseName(self): + descriptor_proto = descriptor_pb2.DescriptorProto() + descriptor_proto.name = 'Bar' + names = ['foo_foo', 'FooBar', 'fooBaz', 'fooFoo', 'foobar'] + camelcase_names = ['fooFoo', 'fooBar', 'fooBaz', 'fooFoo', 'foobar'] + for index in range(len(names)): + field = descriptor_proto.field.add() + field.number = index + 1 + field.name = names[index] + result = descriptor.MakeDescriptor(descriptor_proto) + for index in range(len(camelcase_names)): + self.assertEqual(result.fields[index].camelcase_name, + camelcase_names[index]) + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/encoder.py 
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc.  All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Code for encoding protocol message primitives.

Contains the logic for encoding every logical protocol field type
into one of the 5 physical wire types.

This code is designed to push the Python interpreter's performance to the
limits.

The basic idea is that at startup time, for every field (i.e. every
FieldDescriptor) we construct two functions:  a "sizer" and an "encoder".  The
sizer takes a value of this field's type and computes its byte size.  The
encoder takes a writer function and a value.  It encodes the value into byte
strings and invokes the writer function to write those strings.  Typically the
writer function is the write() method of a BytesIO.

We try to do as much work as possible when constructing the writer and the
sizer rather than when calling them.  In particular:
* We copy any needed global functions to local variables, so that we do not
  need to do costly global table lookups at runtime.
* Similarly, we try to do any attribute lookups at startup time if possible.
* Every field's tag is encoded to bytes at startup, since it can't change at
  runtime.
* Whatever component of the field size we can compute at startup, we do.
* We *avoid* sharing code if doing so would make the code slower and not
  sharing does not burden us too much.  For example, encoders for repeated
  fields do not just call the encoders for singular fields in a loop because
  this would add an extra function call overhead for every loop iteration;
  instead, we manually inline the single-value encoder into the loop.
* If a Python function lacks a return statement, Python actually generates
  instructions to pop the result of the last statement off the stack, push
  None onto the stack, and then return that.  If we really don't care what
  value is returned, then we can save two instructions by returning the
  result of the last statement.  It looks funny but it helps.
* We assume that type and bounds checking has happened at a higher level.
"""

__author__ = 'kenton@google.com (Kenton Varda)'

import struct

import six

from google.protobuf.internal import wire_format


# This will overflow and thus become IEEE-754 "infinity".  We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
_POS_INF = 1e10000
_NEG_INF = -_POS_INF


def _VarintSize(value):
  """Compute the number of bytes needed to serialize value as a varint.

  A varint carries 7 payload bits per byte, so the size is
  ceil(bit_length / 7), with zero still occupying one byte.  Assumes the
  value fits in an unsigned 64-bit integer; bounds checking happens at a
  higher level (see the module docstring).
  """
  return max(1, (value.bit_length() + 6) // 7)


def _SignedVarintSize(value):
  """Compute the number of bytes needed to serialize a signed varint value.

  Negative values are sign-extended to 64 bits on the wire and therefore
  always occupy the maximum of ten bytes.
  """
  if value < 0:
    return 10
  return max(1, (value.bit_length() + 6) // 7)


def _TagSize(field_number):
  """Returns the number of bytes required to serialize a tag with this field
  number."""
  # The wire type occupies only the low three bits of the packed tag, so it
  # never changes the varint byte count; any wire type gives the same size.
  # Just pass in type 0.
  return _VarintSize(wire_format.PackTag(field_number, 0))


# --------------------------------------------------------------------
# In this section we define some generic sizers.  Each of these functions
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
# It returns another function which in turn takes parameters specific to a
# particular field, e.g. the field number and whether it is repeated or packed.
# Look at the next section to see how these are used.
def _SimpleSizer(compute_value_size):
  """A sizer which uses the function compute_value_size to compute the size of
  each value.  Typically compute_value_size is _VarintSize.

  Returns a "sizer constructor": a function taking
  (field_number, is_repeated, is_packed) and returning the sizer closure.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    # The tag size depends only on the field number, so compute it once at
    # closure-build time rather than on every sizer call.
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize  # local alias avoids a global lookup
      def PackedFieldSize(value):
        result = 0
        for element in value:
          result += compute_value_size(element)
        # Packed layout: one tag + varint-encoded payload length + payload.
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        # Non-packed repeated fields repeat the tag for every element.
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(element)
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(value)
      return FieldSize

  return SpecificSizer


def _ModifiedSizer(compute_value_size, modify_value):
  """Like SimpleSizer, but modify_value is invoked on each value before it is
  passed to compute_value_size.  modify_value is typically ZigZagEncode."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = 0
        for element in value:
          result += compute_value_size(modify_value(element))
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(modify_value(element))
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(modify_value(value))
      return FieldSize

  return SpecificSizer


def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field.  The input is the size
  of one value."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        # Every element has the same wire size, so no per-element loop.
        result = len(value) * value_size
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      # Tag + payload size is constant per element; precompute the sum.
      element_size = value_size + tag_size
      def RepeatedFieldSize(value):
        return len(value) * element_size
      return RepeatedFieldSize
    else:
      field_size = value_size + tag_size
      def FieldSize(value):
        return field_size
      return FieldSize

  return SpecificSizer


# ====================================================================
# Here we declare a sizer constructor for each field type.  Each "sizer
# constructor" is a function that takes (field_number, is_repeated, is_packed)
# as parameters and returns a sizer, which in turn takes a field value as
# a parameter and returns its encoded size.


Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)

UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)

SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)

Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)

BoolSizer = _FixedSizer(1)


def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  # Length-delimited fields cannot be packed.
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # The wire size is that of the UTF-8 encoding, not of the Python
        # string object.
        l = local_len(element.encode('utf-8'))
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value.encode('utf-8'))
      return tag_size + local_VarintSize(l) + l
    return FieldSize


def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  # Length-delimited fields cannot be packed.
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = local_len(element)
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value)
      return tag_size + local_VarintSize(l) + l
    return FieldSize


def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field."""

  # Groups are framed by a start tag and an end tag, hence the * 2.
  tag_size = _TagSize(field_number) * 2
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        result += element.ByteSize()
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      return tag_size + value.ByteSize()
    return FieldSize


def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # Messages are length-delimited: varint length prefix + body bytes.
        l = element.ByteSize()
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = value.ByteSize()
      return tag_size + local_VarintSize(l) + l
    return FieldSize


# --------------------------------------------------------------------
# MessageSet is special: it needs custom logic to compute its size properly.


def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything except the nested message's length prefix and body is
  # constant per field: the Item group's start/end tags, the type_id tag
  # and value, and the message field's tag.
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize

  def FieldSize(value):
    l = value.ByteSize()
    return static_size + local_VarintSize(l) + l

  return FieldSize


# --------------------------------------------------------------------
# Map is special: it needs custom logic to compute its size properly.


def MapSizer(field_descriptor):
  """Returns a sizer for a map field."""

  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  message_sizer = MessageSizer(field_descriptor.number, False, False)

  def FieldSize(map_value):
    total = 0
    for key in map_value:
      value = map_value[key]
      # It's wasteful to create the messages and throw them away one second
      # later since we'll do the same for the actual encode.  But there's not
      # an obvious way to avoid this within the current design without tons of
      # code duplication.
      entry_msg = message_type._concrete_class(key=key, value=value)
      total += message_sizer(entry_msg)
    return total

  return FieldSize

# ====================================================================
# Encoders!
def _VarintEncoder():
  """Return an encoder for a basic varint value (does not include tag)."""

  def EncodeVarint(write, value):
    bits = value & 0x7f
    value >>= 7
    # Emit 7 payload bits per byte, setting the continuation (high) bit on
    # every byte except the last.
    while value:
      write(six.int2byte(0x80|bits))
      bits = value & 0x7f
      value >>= 7
    # Returning write()'s result saves two bytecode instructions; see the
    # module docstring.
    return write(six.int2byte(bits))

  return EncodeVarint


def _SignedVarintEncoder():
  """Return an encoder for a basic signed varint value (does not include
  tag)."""

  def EncodeSignedVarint(write, value):
    if value < 0:
      # Reinterpret as unsigned 64-bit two's complement; negative values
      # therefore always encode to the full ten bytes.
      value += (1 << 64)
    bits = value & 0x7f
    value >>= 7
    while value:
      write(six.int2byte(0x80|bits))
      bits = value & 0x7f
      value >>= 7
    return write(six.int2byte(bits))

  return EncodeSignedVarint


_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()


def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes.  This is only
  called at startup time so it doesn't need to be fast."""

  pieces = []
  _EncodeVarint(pieces.append, value)
  return b"".join(pieces)


def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes.  Only called at startup."""

  return _VarintBytes(wire_format.PackTag(field_number, wire_type))

# --------------------------------------------------------------------
# As with sizers (see above), we have a number of common encoder
# implementations.


def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        # First pass: compute the total payload size for the length prefix.
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size)
        # Second pass: emit the values themselves.
        for element in value:
          encode_value(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        # Non-packed repeated fields carry the tag before every element.
        for element in value:
          write(tag_bytes)
          encode_value(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return encode_value(write, value)
      return EncodeField

  return SpecificEncoder


def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value.  Usually modify_value is ZigZagEncode."""

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size)
        for element in value:
          encode_value(write, modify_value(element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return encode_value(write, modify_value(value))
      return EncodeField

  return SpecificEncoder


def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack  # local alias avoids attribute lookups
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        # Fixed-width elements: the payload length is a simple product.
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder


def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)
  if value_size == 4:
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      if value == _POS_INF:
        write(b'\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x80\xFF')
      elif value != value:           # NaN (only value that is != itself)
        write(b'\x00\x00\xC0\x7F')
      else:
        # Not actually a non-finite value: re-raise the original pack error.
        raise
  elif value_size == 8:
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:           # NaN
        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            # NOTE(review): presumably struct.pack raises SystemError for
            # non-finite values on some platforms -- confirm this still holds
            # on supported interpreters.
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField

  return SpecificEncoder


# ====================================================================
# Here we declare an encoder constructor for each field type.  These work
# very similarly to sizer constructors, described earlier.
+ + +Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) + +UInt32Encoder = UInt64Encoder = _SimpleEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) + +SInt32Encoder = SInt64Encoder = _ModifiedEncoder( + wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, + wire_format.ZigZagEncode) + +# Note that Python conveniently guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '\n\x04loop\x18\x01 \x01(\x0b\x32\x30.google.protobuf.python.internal.Factory2Message\"D\n\x19MessageWithNestedEnumOnly\"\'\n\nNestedEnum\x12\x19\n\x15NESTED_MESSAGE_ENUM_0\x10\x00*<\n\x0c\x46\x61\x63tory2Enum\x12\x15\n\x11\x46\x41\x43TORY_2_VALUE_0\x10\x00\x12\x15\n\x11\x46\x41\x43TORY_2_VALUE_1\x10\x01:H\n\ranother_field\x12\x30.google.protobuf.python.internal.Factory1Message\x18\xea\x07 \x01(\t') + , + dependencies=[google_dot_protobuf_dot_internal_dot_factory__test1__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_FACTORY2ENUM = _descriptor.EnumDescriptor( + name='Factory2Enum', + full_name='google.protobuf.python.internal.Factory2Enum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FACTORY_2_VALUE_0', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FACTORY_2_VALUE_1', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1775, + serialized_end=1835, +) +_sym_db.RegisterEnumDescriptor(_FACTORY2ENUM) + +Factory2Enum = enum_type_wrapper.EnumTypeWrapper(_FACTORY2ENUM) +FACTORY_2_VALUE_0 = 0 +FACTORY_2_VALUE_1 = 1 + +ANOTHER_FIELD_FIELD_NUMBER = 1002 +another_field = _descriptor.FieldDescriptor( + 
name='another_field', full_name='google.protobuf.python.internal.another_field', index=0, + number=1002, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + +_FACTORY2MESSAGE_NESTEDFACTORY2ENUM = _descriptor.EnumDescriptor( + name='NestedFactory2Enum', + full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Enum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NESTED_FACTORY_2_VALUE_0', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NESTED_FACTORY_2_VALUE_1', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1454, + serialized_end=1534, +) +_sym_db.RegisterEnumDescriptor(_FACTORY2MESSAGE_NESTEDFACTORY2ENUM) + +_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NESTED_MESSAGE_ENUM_0', index=0, number=0, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1734, + serialized_end=1773, +) +_sym_db.RegisterEnumDescriptor(_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM) + + +_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE = _descriptor.Descriptor( + name='NestedFactory2Message', + full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Message', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Message.value', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1371, + serialized_end=1409, +) + +_FACTORY2MESSAGE_GROUPED = _descriptor.Descriptor( + name='Grouped', + full_name='google.protobuf.python.internal.Factory2Message.Grouped', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='part_1', full_name='google.protobuf.python.internal.Factory2Message.Grouped.part_1', index=0, + number=13, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='part_2', full_name='google.protobuf.python.internal.Factory2Message.Grouped.part_2', index=1, + number=14, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1411, + serialized_end=1452, +) + +_FACTORY2MESSAGE = _descriptor.Descriptor( + name='Factory2Message', + full_name='google.protobuf.python.internal.Factory2Message', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='mandatory', full_name='google.protobuf.python.internal.Factory2Message.mandatory', index=0, + number=1, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='factory_2_enum', full_name='google.protobuf.python.internal.Factory2Message.factory_2_enum', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_factory_2_enum', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_2_enum', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_factory_2_message', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_2_message', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='factory_1_message', full_name='google.protobuf.python.internal.Factory2Message.factory_1_message', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='factory_1_enum', full_name='google.protobuf.python.internal.Factory2Message.factory_1_enum', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_factory_1_enum', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_1_enum', index=6, + number=7, type=14, cpp_type=8, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_factory_1_message', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_1_message', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='circular_message', full_name='google.protobuf.python.internal.Factory2Message.circular_message', index=8, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='scalar_value', full_name='google.protobuf.python.internal.Factory2Message.scalar_value', index=9, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='list_value', full_name='google.protobuf.python.internal.Factory2Message.list_value', index=10, + number=11, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='grouped', full_name='google.protobuf.python.internal.Factory2Message.grouped', index=11, + number=12, type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='loop', 
full_name='google.protobuf.python.internal.Factory2Message.loop', index=12, + number=15, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int_with_default', full_name='google.protobuf.python.internal.Factory2Message.int_with_default', index=13, + number=16, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=1776, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='double_with_default', full_name='google.protobuf.python.internal.Factory2Message.double_with_default', index=14, + number=17, type=1, cpp_type=5, label=1, + has_default_value=True, default_value=9.99, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_with_default', full_name='google.protobuf.python.internal.Factory2Message.string_with_default', index=15, + number=18, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("hello world").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bool_with_default', full_name='google.protobuf.python.internal.Factory2Message.bool_with_default', index=16, + number=19, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='enum_with_default', full_name='google.protobuf.python.internal.Factory2Message.enum_with_default', index=17, + number=20, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=1, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bytes_with_default', full_name='google.protobuf.python.internal.Factory2Message.bytes_with_default', index=18, + number=21, type=12, cpp_type=9, label=1, + has_default_value=True, default_value=_b("a\373\000c"), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_int', full_name='google.protobuf.python.internal.Factory2Message.oneof_int', index=19, + number=22, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_string', full_name='google.protobuf.python.internal.Factory2Message.oneof_string', index=20, + number=23, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='one_more_field', full_name='google.protobuf.python.internal.Factory2Message.one_more_field', index=0, + number=1001, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE, _FACTORY2MESSAGE_GROUPED, ], + enum_types=[ + _FACTORY2MESSAGE_NESTEDFACTORY2ENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='oneof_field', full_name='google.protobuf.python.internal.Factory2Message.oneof_field', + index=0, containing_type=None, fields=[]), + ], + 
serialized_start=128, + serialized_end=1624, +) + + +_LOOPMESSAGE = _descriptor.Descriptor( + name='LoopMessage', + full_name='google.protobuf.python.internal.LoopMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='loop', full_name='google.protobuf.python.internal.LoopMessage.loop', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1626, + serialized_end=1703, +) + + +_MESSAGEWITHNESTEDENUMONLY = _descriptor.Descriptor( + name='MessageWithNestedEnumOnly', + full_name='google.protobuf.python.internal.MessageWithNestedEnumOnly', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _MESSAGEWITHNESTEDENUMONLY_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1705, + serialized_end=1773, +) + +_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE.containing_type = _FACTORY2MESSAGE +_FACTORY2MESSAGE_GROUPED.containing_type = _FACTORY2MESSAGE +_FACTORY2MESSAGE.fields_by_name['factory_2_enum'].enum_type = _FACTORY2ENUM +_FACTORY2MESSAGE.fields_by_name['nested_factory_2_enum'].enum_type = _FACTORY2MESSAGE_NESTEDFACTORY2ENUM +_FACTORY2MESSAGE.fields_by_name['nested_factory_2_message'].message_type = _FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE +_FACTORY2MESSAGE.fields_by_name['factory_1_message'].message_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE +_FACTORY2MESSAGE.fields_by_name['factory_1_enum'].enum_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1ENUM 
+_FACTORY2MESSAGE.fields_by_name['nested_factory_1_enum'].enum_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE_NESTEDFACTORY1ENUM +_FACTORY2MESSAGE.fields_by_name['nested_factory_1_message'].message_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE +_FACTORY2MESSAGE.fields_by_name['circular_message'].message_type = _FACTORY2MESSAGE +_FACTORY2MESSAGE.fields_by_name['grouped'].message_type = _FACTORY2MESSAGE_GROUPED +_FACTORY2MESSAGE.fields_by_name['loop'].message_type = _LOOPMESSAGE +_FACTORY2MESSAGE.fields_by_name['enum_with_default'].enum_type = _FACTORY2ENUM +_FACTORY2MESSAGE_NESTEDFACTORY2ENUM.containing_type = _FACTORY2MESSAGE +_FACTORY2MESSAGE.oneofs_by_name['oneof_field'].fields.append( + _FACTORY2MESSAGE.fields_by_name['oneof_int']) +_FACTORY2MESSAGE.fields_by_name['oneof_int'].containing_oneof = _FACTORY2MESSAGE.oneofs_by_name['oneof_field'] +_FACTORY2MESSAGE.oneofs_by_name['oneof_field'].fields.append( + _FACTORY2MESSAGE.fields_by_name['oneof_string']) +_FACTORY2MESSAGE.fields_by_name['oneof_string'].containing_oneof = _FACTORY2MESSAGE.oneofs_by_name['oneof_field'] +_LOOPMESSAGE.fields_by_name['loop'].message_type = _FACTORY2MESSAGE +_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM.containing_type = _MESSAGEWITHNESTEDENUMONLY +DESCRIPTOR.message_types_by_name['Factory2Message'] = _FACTORY2MESSAGE +DESCRIPTOR.message_types_by_name['LoopMessage'] = _LOOPMESSAGE +DESCRIPTOR.message_types_by_name['MessageWithNestedEnumOnly'] = _MESSAGEWITHNESTEDENUMONLY +DESCRIPTOR.enum_types_by_name['Factory2Enum'] = _FACTORY2ENUM +DESCRIPTOR.extensions_by_name['another_field'] = another_field + +Factory2Message = _reflection.GeneratedProtocolMessageType('Factory2Message', (_message.Message,), dict( + + NestedFactory2Message = _reflection.GeneratedProtocolMessageType('NestedFactory2Message', (_message.Message,), dict( + DESCRIPTOR = _FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE, + __module__ = 
'google.protobuf.internal.factory_test2_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message.NestedFactory2Message) + )) + , + + Grouped = _reflection.GeneratedProtocolMessageType('Grouped', (_message.Message,), dict( + DESCRIPTOR = _FACTORY2MESSAGE_GROUPED, + __module__ = 'google.protobuf.internal.factory_test2_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message.Grouped) + )) + , + DESCRIPTOR = _FACTORY2MESSAGE, + __module__ = 'google.protobuf.internal.factory_test2_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message) + )) +_sym_db.RegisterMessage(Factory2Message) +_sym_db.RegisterMessage(Factory2Message.NestedFactory2Message) +_sym_db.RegisterMessage(Factory2Message.Grouped) + +LoopMessage = _reflection.GeneratedProtocolMessageType('LoopMessage', (_message.Message,), dict( + DESCRIPTOR = _LOOPMESSAGE, + __module__ = 'google.protobuf.internal.factory_test2_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.LoopMessage) + )) +_sym_db.RegisterMessage(LoopMessage) + +MessageWithNestedEnumOnly = _reflection.GeneratedProtocolMessageType('MessageWithNestedEnumOnly', (_message.Message,), dict( + DESCRIPTOR = _MESSAGEWITHNESTEDENUMONLY, + __module__ = 'google.protobuf.internal.factory_test2_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.MessageWithNestedEnumOnly) + )) +_sym_db.RegisterMessage(MessageWithNestedEnumOnly) + +google_dot_protobuf_dot_internal_dot_factory__test1__pb2.Factory1Message.RegisterExtension(another_field) +google_dot_protobuf_dot_internal_dot_factory__test1__pb2.Factory1Message.RegisterExtension(_FACTORY2MESSAGE.extensions_by_name['one_more_field']) + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/generator_test.py b/deps/google/protobuf/internal/generator_test.py new file mode 100644 index 00000000..9956da59 --- /dev/null +++ 
b/deps/google/protobuf/internal/generator_test.py @@ -0,0 +1,347 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# TODO(robinson): Flesh this out considerably. We focused on reflection_test.py +# first, since it's testing the subtler code, and since it provides decent +# indirect testing of the protocol compiler output. 
+ +"""Unittest that directly tests the output of the pure-Python protocol +compiler. See //google/protobuf/internal/reflection_test.py for a test which +further ensures that we can use Python protocol message objects as we expect. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf.internal import test_bad_identifiers_pb2 +from google.protobuf import unittest_custom_options_pb2 +from google.protobuf import unittest_import_pb2 +from google.protobuf import unittest_import_public_pb2 +from google.protobuf import unittest_mset_pb2 +from google.protobuf import unittest_mset_wire_format_pb2 +from google.protobuf import unittest_no_generic_services_pb2 +from google.protobuf import unittest_pb2 +from google.protobuf import service +from google.protobuf import symbol_database + +MAX_EXTENSION = 536870912 + + +class GeneratorTest(unittest.TestCase): + + def testNestedMessageDescriptor(self): + field_name = 'optional_nested_message' + proto_type = unittest_pb2.TestAllTypes + self.assertEqual( + proto_type.NestedMessage.DESCRIPTOR, + proto_type.DESCRIPTOR.fields_by_name[field_name].message_type) + + def testEnums(self): + # We test only module-level enums here. + # TODO(robinson): Examine descriptors directly to check + # enum descriptor output. + self.assertEqual(4, unittest_pb2.FOREIGN_FOO) + self.assertEqual(5, unittest_pb2.FOREIGN_BAR) + self.assertEqual(6, unittest_pb2.FOREIGN_BAZ) + + proto = unittest_pb2.TestAllTypes() + self.assertEqual(1, proto.FOO) + self.assertEqual(1, unittest_pb2.TestAllTypes.FOO) + self.assertEqual(2, proto.BAR) + self.assertEqual(2, unittest_pb2.TestAllTypes.BAR) + self.assertEqual(3, proto.BAZ) + self.assertEqual(3, unittest_pb2.TestAllTypes.BAZ) + + def testExtremeDefaultValues(self): + message = unittest_pb2.TestExtremeDefaultValues() + + # Python pre-2.6 does not have isinf() or isnan() functions, so we have + # to provide our own. 
+ def isnan(val): + # NaN is never equal to itself. + return val != val + def isinf(val): + # Infinity times zero equals NaN. + return not isnan(val) and isnan(val * 0) + + self.assertTrue(isinf(message.inf_double)) + self.assertTrue(message.inf_double > 0) + self.assertTrue(isinf(message.neg_inf_double)) + self.assertTrue(message.neg_inf_double < 0) + self.assertTrue(isnan(message.nan_double)) + + self.assertTrue(isinf(message.inf_float)) + self.assertTrue(message.inf_float > 0) + self.assertTrue(isinf(message.neg_inf_float)) + self.assertTrue(message.neg_inf_float < 0) + self.assertTrue(isnan(message.nan_float)) + self.assertEqual("? ? ?? ?? ??? ??/ ??-", message.cpp_trigraph) + + def testHasDefaultValues(self): + desc = unittest_pb2.TestAllTypes.DESCRIPTOR + + expected_has_default_by_name = { + 'optional_int32': False, + 'repeated_int32': False, + 'optional_nested_message': False, + 'default_int32': True, + } + + has_default_by_name = dict( + [(f.name, f.has_default_value) + for f in desc.fields + if f.name in expected_has_default_by_name]) + self.assertEqual(expected_has_default_by_name, has_default_by_name) + + def testContainingTypeBehaviorForExtensions(self): + self.assertEqual(unittest_pb2.optional_int32_extension.containing_type, + unittest_pb2.TestAllExtensions.DESCRIPTOR) + self.assertEqual(unittest_pb2.TestRequired.single.containing_type, + unittest_pb2.TestAllExtensions.DESCRIPTOR) + + def testExtensionScope(self): + self.assertEqual(unittest_pb2.optional_int32_extension.extension_scope, + None) + self.assertEqual(unittest_pb2.TestRequired.single.extension_scope, + unittest_pb2.TestRequired.DESCRIPTOR) + + def testIsExtension(self): + self.assertTrue(unittest_pb2.optional_int32_extension.is_extension) + self.assertTrue(unittest_pb2.TestRequired.single.is_extension) + + message_descriptor = unittest_pb2.TestRequired.DESCRIPTOR + non_extension_descriptor = message_descriptor.fields_by_name['a'] + self.assertTrue(not non_extension_descriptor.is_extension) 
+ + def testOptions(self): + proto = unittest_mset_wire_format_pb2.TestMessageSet() + self.assertTrue(proto.DESCRIPTOR.GetOptions().message_set_wire_format) + + def testMessageWithCustomOptions(self): + proto = unittest_custom_options_pb2.TestMessageWithCustomOptions() + enum_options = proto.DESCRIPTOR.enum_types_by_name['AnEnum'].GetOptions() + self.assertTrue(enum_options is not None) + # TODO(gps): We really should test for the presence of the enum_opt1 + # extension and for its value to be set to -789. + + def testNestedTypes(self): + self.assertEqual( + set(unittest_pb2.TestAllTypes.DESCRIPTOR.nested_types), + set([ + unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR, + unittest_pb2.TestAllTypes.OptionalGroup.DESCRIPTOR, + unittest_pb2.TestAllTypes.RepeatedGroup.DESCRIPTOR, + ])) + self.assertEqual(unittest_pb2.TestEmptyMessage.DESCRIPTOR.nested_types, []) + self.assertEqual( + unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.nested_types, []) + + def testContainingType(self): + self.assertTrue( + unittest_pb2.TestEmptyMessage.DESCRIPTOR.containing_type is None) + self.assertTrue( + unittest_pb2.TestAllTypes.DESCRIPTOR.containing_type is None) + self.assertEqual( + unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.containing_type, + unittest_pb2.TestAllTypes.DESCRIPTOR) + self.assertEqual( + unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.containing_type, + unittest_pb2.TestAllTypes.DESCRIPTOR) + self.assertEqual( + unittest_pb2.TestAllTypes.RepeatedGroup.DESCRIPTOR.containing_type, + unittest_pb2.TestAllTypes.DESCRIPTOR) + + def testContainingTypeInEnumDescriptor(self): + self.assertTrue(unittest_pb2._FOREIGNENUM.containing_type is None) + self.assertEqual(unittest_pb2._TESTALLTYPES_NESTEDENUM.containing_type, + unittest_pb2.TestAllTypes.DESCRIPTOR) + + def testPackage(self): + self.assertEqual( + unittest_pb2.TestAllTypes.DESCRIPTOR.file.package, + 'protobuf_unittest') + desc = unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR + 
self.assertEqual(desc.file.package, 'protobuf_unittest') + self.assertEqual( + unittest_import_pb2.ImportMessage.DESCRIPTOR.file.package, + 'protobuf_unittest_import') + + self.assertEqual( + unittest_pb2._FOREIGNENUM.file.package, 'protobuf_unittest') + self.assertEqual( + unittest_pb2._TESTALLTYPES_NESTEDENUM.file.package, + 'protobuf_unittest') + self.assertEqual( + unittest_import_pb2._IMPORTENUM.file.package, + 'protobuf_unittest_import') + + def testExtensionRange(self): + self.assertEqual( + unittest_pb2.TestAllTypes.DESCRIPTOR.extension_ranges, []) + self.assertEqual( + unittest_pb2.TestAllExtensions.DESCRIPTOR.extension_ranges, + [(1, MAX_EXTENSION)]) + self.assertEqual( + unittest_pb2.TestMultipleExtensionRanges.DESCRIPTOR.extension_ranges, + [(42, 43), (4143, 4244), (65536, MAX_EXTENSION)]) + + def testFileDescriptor(self): + self.assertEqual(unittest_pb2.DESCRIPTOR.name, + 'google/protobuf/unittest.proto') + self.assertEqual(unittest_pb2.DESCRIPTOR.package, 'protobuf_unittest') + self.assertFalse(unittest_pb2.DESCRIPTOR.serialized_pb is None) + self.assertEqual(unittest_pb2.DESCRIPTOR.dependencies, + [unittest_import_pb2.DESCRIPTOR]) + self.assertEqual(unittest_import_pb2.DESCRIPTOR.dependencies, + [unittest_import_public_pb2.DESCRIPTOR]) + + def testNoGenericServices(self): + self.assertTrue(hasattr(unittest_no_generic_services_pb2, "TestMessage")) + self.assertTrue(hasattr(unittest_no_generic_services_pb2, "FOO")) + self.assertTrue(hasattr(unittest_no_generic_services_pb2, "test_extension")) + + # Make sure unittest_no_generic_services_pb2 has no services subclassing + # Proto2 Service class. 
+ if hasattr(unittest_no_generic_services_pb2, "TestService"): + self.assertFalse(issubclass(unittest_no_generic_services_pb2.TestService, + service.Service)) + + def testMessageTypesByName(self): + file_type = unittest_pb2.DESCRIPTOR + self.assertEqual( + unittest_pb2._TESTALLTYPES, + file_type.message_types_by_name[unittest_pb2._TESTALLTYPES.name]) + + # Nested messages shouldn't be included in the message_types_by_name + # dictionary (like in the C++ API). + self.assertFalse( + unittest_pb2._TESTALLTYPES_NESTEDMESSAGE.name in + file_type.message_types_by_name) + + def testEnumTypesByName(self): + file_type = unittest_pb2.DESCRIPTOR + self.assertEqual( + unittest_pb2._FOREIGNENUM, + file_type.enum_types_by_name[unittest_pb2._FOREIGNENUM.name]) + + def testExtensionsByName(self): + file_type = unittest_pb2.DESCRIPTOR + self.assertEqual( + unittest_pb2.my_extension_string, + file_type.extensions_by_name[unittest_pb2.my_extension_string.name]) + + def testPublicImports(self): + # Test public imports as embedded message. + all_type_proto = unittest_pb2.TestAllTypes() + self.assertEqual(0, all_type_proto.optional_public_import_message.e) + + # PublicImportMessage is actually defined in unittest_import_public_pb2 + # module, and is public imported by unittest_import_pb2 module. + public_import_proto = unittest_import_pb2.PublicImportMessage() + self.assertEqual(0, public_import_proto.e) + self.assertTrue(unittest_import_public_pb2.PublicImportMessage is + unittest_import_pb2.PublicImportMessage) + + def testBadIdentifiers(self): + # We're just testing that the code was imported without problems. 
+ message = test_bad_identifiers_pb2.TestBadIdentifiers() + self.assertEqual(message.Extensions[test_bad_identifiers_pb2.message], + "foo") + self.assertEqual(message.Extensions[test_bad_identifiers_pb2.descriptor], + "bar") + self.assertEqual(message.Extensions[test_bad_identifiers_pb2.reflection], + "baz") + self.assertEqual(message.Extensions[test_bad_identifiers_pb2.service], + "qux") + + def testOneof(self): + desc = unittest_pb2.TestAllTypes.DESCRIPTOR + self.assertEqual(1, len(desc.oneofs)) + self.assertEqual('oneof_field', desc.oneofs[0].name) + self.assertEqual(0, desc.oneofs[0].index) + self.assertIs(desc, desc.oneofs[0].containing_type) + self.assertIs(desc.oneofs[0], desc.oneofs_by_name['oneof_field']) + nested_names = set(['oneof_uint32', 'oneof_nested_message', + 'oneof_string', 'oneof_bytes']) + self.assertEqual( + nested_names, + set([field.name for field in desc.oneofs[0].fields])) + for field_name, field_desc in desc.fields_by_name.items(): + if field_name in nested_names: + self.assertIs(desc.oneofs[0], field_desc.containing_oneof) + else: + self.assertIsNone(field_desc.containing_oneof) + + +class SymbolDatabaseRegistrationTest(unittest.TestCase): + """Checks that messages, enums and files are correctly registered.""" + + def testGetSymbol(self): + self.assertEqual( + unittest_pb2.TestAllTypes, symbol_database.Default().GetSymbol( + 'protobuf_unittest.TestAllTypes')) + self.assertEqual( + unittest_pb2.TestAllTypes.NestedMessage, + symbol_database.Default().GetSymbol( + 'protobuf_unittest.TestAllTypes.NestedMessage')) + with self.assertRaises(KeyError): + symbol_database.Default().GetSymbol('protobuf_unittest.NestedMessage') + self.assertEqual( + unittest_pb2.TestAllTypes.OptionalGroup, + symbol_database.Default().GetSymbol( + 'protobuf_unittest.TestAllTypes.OptionalGroup')) + self.assertEqual( + unittest_pb2.TestAllTypes.RepeatedGroup, + symbol_database.Default().GetSymbol( + 'protobuf_unittest.TestAllTypes.RepeatedGroup')) + + def 
testEnums(self): + self.assertEqual( + 'protobuf_unittest.ForeignEnum', + symbol_database.Default().pool.FindEnumTypeByName( + 'protobuf_unittest.ForeignEnum').full_name) + self.assertEqual( + 'protobuf_unittest.TestAllTypes.NestedEnum', + symbol_database.Default().pool.FindEnumTypeByName( + 'protobuf_unittest.TestAllTypes.NestedEnum').full_name) + + def testFindFileByName(self): + self.assertEqual( + 'google/protobuf/unittest.proto', + symbol_database.Default().pool.FindFileByName( + 'google/protobuf/unittest.proto').name) + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/import_test_package/__init__.py b/deps/google/protobuf/internal/import_test_package/__init__.py new file mode 100644 index 00000000..5121dd0e --- /dev/null +++ b/deps/google/protobuf/internal/import_test_package/__init__.py @@ -0,0 +1,33 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Sample module importing a nested proto from itself.""" + +from google.protobuf.internal.import_test_package import outer_pb2 as myproto diff --git a/deps/google/protobuf/internal/import_test_package/__pycache__/__init__.cpython-34.pyc b/deps/google/protobuf/internal/import_test_package/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..db65e244 Binary files /dev/null and b/deps/google/protobuf/internal/import_test_package/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/import_test_package/__pycache__/inner_pb2.cpython-34.pyc b/deps/google/protobuf/internal/import_test_package/__pycache__/inner_pb2.cpython-34.pyc new file mode 100644 index 00000000..3c749f73 Binary files /dev/null and b/deps/google/protobuf/internal/import_test_package/__pycache__/inner_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/import_test_package/__pycache__/outer_pb2.cpython-34.pyc b/deps/google/protobuf/internal/import_test_package/__pycache__/outer_pb2.cpython-34.pyc new file mode 100644 index 00000000..665f9636 Binary files /dev/null and b/deps/google/protobuf/internal/import_test_package/__pycache__/outer_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/internal/import_test_package/inner_pb2.py b/deps/google/protobuf/internal/import_test_package/inner_pb2.py new file mode 100644 index 00000000..e45c21b7 --- /dev/null +++ 
b/deps/google/protobuf/internal/import_test_package/inner_pb2.py @@ -0,0 +1,69 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/import_test_package/inner.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/import_test_package/inner.proto', + package='google.protobuf.python.internal.import_test_package', + syntax='proto2', + serialized_pb=_b('\n8google/protobuf/internal/import_test_package/inner.proto\x12\x33google.protobuf.python.internal.import_test_package\"\x1a\n\x05Inner\x12\x11\n\x05value\x18\x01 \x01(\x05:\x02\x35\x37') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_INNER = _descriptor.Descriptor( + name='Inner', + full_name='google.protobuf.python.internal.import_test_package.Inner', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.python.internal.import_test_package.Inner.value', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=57, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=113, + serialized_end=139, +) + +DESCRIPTOR.message_types_by_name['Inner'] = _INNER + +Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), dict( + 
DESCRIPTOR = _INNER, + __module__ = 'google.protobuf.internal.import_test_package.inner_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.import_test_package.Inner) + )) +_sym_db.RegisterMessage(Inner) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/import_test_package/outer_pb2.py b/deps/google/protobuf/internal/import_test_package/outer_pb2.py new file mode 100644 index 00000000..15f72f4f --- /dev/null +++ b/deps/google/protobuf/internal/import_test_package/outer_pb2.py @@ -0,0 +1,72 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/import_test_package/outer.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal.import_test_package import inner_pb2 as google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/import_test_package/outer.proto', + package='google.protobuf.python.internal.import_test_package', + syntax='proto2', + serialized_pb=_b('\n8google/protobuf/internal/import_test_package/outer.proto\x12\x33google.protobuf.python.internal.import_test_package\x1a\x38google/protobuf/internal/import_test_package/inner.proto\"R\n\x05Outer\x12I\n\x05inner\x18\x01 \x01(\x0b\x32:.google.protobuf.python.internal.import_test_package.Inner') + , + dependencies=[google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_OUTER = _descriptor.Descriptor( + name='Outer', + 
full_name='google.protobuf.python.internal.import_test_package.Outer', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='inner', full_name='google.protobuf.python.internal.import_test_package.Outer.inner', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=171, + serialized_end=253, +) + +_OUTER.fields_by_name['inner'].message_type = google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2._INNER +DESCRIPTOR.message_types_by_name['Outer'] = _OUTER + +Outer = _reflection.GeneratedProtocolMessageType('Outer', (_message.Message,), dict( + DESCRIPTOR = _OUTER, + __module__ = 'google.protobuf.internal.import_test_package.outer_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.import_test_package.Outer) + )) +_sym_db.RegisterMessage(Outer) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/json_format_test.py b/deps/google/protobuf/internal/json_format_test.py new file mode 100644 index 00000000..be3ad11a --- /dev/null +++ b/deps/google/protobuf/internal/json_format_test.py @@ -0,0 +1,534 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Test for google.protobuf.json_format.""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +import json +import math +import sys + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf.internal import well_known_types +from google.protobuf import json_format +from google.protobuf.util import json_format_proto3_pb2 + + +class JsonFormatBase(unittest.TestCase): + + def FillAllFields(self, message): + message.int32_value = 20 + message.int64_value = -20 + message.uint32_value = 3120987654 + message.uint64_value = 12345678900 + message.float_value = float('-inf') + message.double_value = 3.1415 + message.bool_value = True + message.string_value = 'foo' + message.bytes_value = b'bar' + message.message_value.value = 10 + message.enum_value = json_format_proto3_pb2.BAR + # Repeated + message.repeated_int32_value.append(0x7FFFFFFF) + message.repeated_int32_value.append(-2147483648) + message.repeated_int64_value.append(9007199254740992) + message.repeated_int64_value.append(-9007199254740992) + message.repeated_uint32_value.append(0xFFFFFFF) + message.repeated_uint32_value.append(0x7FFFFFF) + message.repeated_uint64_value.append(9007199254740992) + message.repeated_uint64_value.append(9007199254740991) + message.repeated_float_value.append(0) + + message.repeated_double_value.append(1E-15) + message.repeated_double_value.append(float('inf')) + message.repeated_bool_value.append(True) + message.repeated_bool_value.append(False) + message.repeated_string_value.append('Few symbols!#$,;') + message.repeated_string_value.append('bar') + message.repeated_bytes_value.append(b'foo') + message.repeated_bytes_value.append(b'bar') + message.repeated_message_value.add().value = 10 + message.repeated_message_value.add().value = 11 + message.repeated_enum_value.append(json_format_proto3_pb2.FOO) + message.repeated_enum_value.append(json_format_proto3_pb2.BAR) + self.message = message + + def CheckParseBack(self, message, parsed_message): + 
json_format.Parse(json_format.MessageToJson(message), + parsed_message) + self.assertEqual(message, parsed_message) + + def CheckError(self, text, error_message): + message = json_format_proto3_pb2.TestMessage() + self.assertRaisesRegexp( + json_format.ParseError, + error_message, + json_format.Parse, text, message) + + +class JsonFormatTest(JsonFormatBase): + + def testEmptyMessageToJson(self): + message = json_format_proto3_pb2.TestMessage() + self.assertEqual(json_format.MessageToJson(message), + '{}') + parsed_message = json_format_proto3_pb2.TestMessage() + self.CheckParseBack(message, parsed_message) + + def testPartialMessageToJson(self): + message = json_format_proto3_pb2.TestMessage( + string_value='test', + repeated_int32_value=[89, 4]) + self.assertEqual(json.loads(json_format.MessageToJson(message)), + json.loads('{"stringValue": "test", ' + '"repeatedInt32Value": [89, 4]}')) + parsed_message = json_format_proto3_pb2.TestMessage() + self.CheckParseBack(message, parsed_message) + + def testAllFieldsToJson(self): + message = json_format_proto3_pb2.TestMessage() + text = ('{"int32Value": 20, ' + '"int64Value": "-20", ' + '"uint32Value": 3120987654,' + '"uint64Value": "12345678900",' + '"floatValue": "-Infinity",' + '"doubleValue": 3.1415,' + '"boolValue": true,' + '"stringValue": "foo",' + '"bytesValue": "YmFy",' + '"messageValue": {"value": 10},' + '"enumValue": "BAR",' + '"repeatedInt32Value": [2147483647, -2147483648],' + '"repeatedInt64Value": ["9007199254740992", "-9007199254740992"],' + '"repeatedUint32Value": [268435455, 134217727],' + '"repeatedUint64Value": ["9007199254740992", "9007199254740991"],' + '"repeatedFloatValue": [0],' + '"repeatedDoubleValue": [1e-15, "Infinity"],' + '"repeatedBoolValue": [true, false],' + '"repeatedStringValue": ["Few symbols!#$,;", "bar"],' + '"repeatedBytesValue": ["Zm9v", "YmFy"],' + '"repeatedMessageValue": [{"value": 10}, {"value": 11}],' + '"repeatedEnumValue": ["FOO", "BAR"]' + '}') + 
self.FillAllFields(message) + self.assertEqual( + json.loads(json_format.MessageToJson(message)), + json.loads(text)) + parsed_message = json_format_proto3_pb2.TestMessage() + json_format.Parse(text, parsed_message) + self.assertEqual(message, parsed_message) + + def testJsonEscapeString(self): + message = json_format_proto3_pb2.TestMessage() + if sys.version_info[0] < 3: + message.string_value = '&\n<\"\r>\b\t\f\\\001/\xe2\x80\xa8\xe2\x80\xa9' + else: + message.string_value = '&\n<\"\r>\b\t\f\\\001/' + message.string_value += (b'\xe2\x80\xa8\xe2\x80\xa9').decode('utf-8') + self.assertEqual( + json_format.MessageToJson(message), + '{\n "stringValue": ' + '"&\\n<\\\"\\r>\\b\\t\\f\\\\\\u0001/\\u2028\\u2029"\n}') + parsed_message = json_format_proto3_pb2.TestMessage() + self.CheckParseBack(message, parsed_message) + text = u'{"int32Value": "\u0031"}' + json_format.Parse(text, message) + self.assertEqual(message.int32_value, 1) + + def testAlwaysSeriliaze(self): + message = json_format_proto3_pb2.TestMessage( + string_value='foo') + self.assertEqual( + json.loads(json_format.MessageToJson(message, True)), + json.loads('{' + '"repeatedStringValue": [],' + '"stringValue": "foo",' + '"repeatedBoolValue": [],' + '"repeatedUint32Value": [],' + '"repeatedInt32Value": [],' + '"enumValue": "FOO",' + '"int32Value": 0,' + '"floatValue": 0,' + '"int64Value": "0",' + '"uint32Value": 0,' + '"repeatedBytesValue": [],' + '"repeatedUint64Value": [],' + '"repeatedDoubleValue": [],' + '"bytesValue": "",' + '"boolValue": false,' + '"repeatedEnumValue": [],' + '"uint64Value": "0",' + '"doubleValue": 0,' + '"repeatedFloatValue": [],' + '"repeatedInt64Value": [],' + '"repeatedMessageValue": []}')) + parsed_message = json_format_proto3_pb2.TestMessage() + self.CheckParseBack(message, parsed_message) + + def testMapFields(self): + message = json_format_proto3_pb2.TestMap() + message.bool_map[True] = 1 + message.bool_map[False] = 2 + message.int32_map[1] = 2 + message.int32_map[2] = 3 + 
message.int64_map[1] = 2 + message.int64_map[2] = 3 + message.uint32_map[1] = 2 + message.uint32_map[2] = 3 + message.uint64_map[1] = 2 + message.uint64_map[2] = 3 + message.string_map['1'] = 2 + message.string_map['null'] = 3 + self.assertEqual( + json.loads(json_format.MessageToJson(message, True)), + json.loads('{' + '"boolMap": {"false": 2, "true": 1},' + '"int32Map": {"1": 2, "2": 3},' + '"int64Map": {"1": 2, "2": 3},' + '"uint32Map": {"1": 2, "2": 3},' + '"uint64Map": {"1": 2, "2": 3},' + '"stringMap": {"1": 2, "null": 3}' + '}')) + parsed_message = json_format_proto3_pb2.TestMap() + self.CheckParseBack(message, parsed_message) + + def testOneofFields(self): + message = json_format_proto3_pb2.TestOneof() + # Always print does not affect oneof fields. + self.assertEqual( + json_format.MessageToJson(message, True), + '{}') + message.oneof_int32_value = 0 + self.assertEqual( + json_format.MessageToJson(message, True), + '{\n' + ' "oneofInt32Value": 0\n' + '}') + parsed_message = json_format_proto3_pb2.TestOneof() + self.CheckParseBack(message, parsed_message) + + def testTimestampMessage(self): + message = json_format_proto3_pb2.TestTimestamp() + message.value.seconds = 0 + message.value.nanos = 0 + message.repeated_value.add().seconds = 20 + message.repeated_value[0].nanos = 1 + message.repeated_value.add().seconds = 0 + message.repeated_value[1].nanos = 10000 + message.repeated_value.add().seconds = 100000000 + message.repeated_value[2].nanos = 0 + # Maximum time + message.repeated_value.add().seconds = 253402300799 + message.repeated_value[3].nanos = 999999999 + # Minimum time + message.repeated_value.add().seconds = -62135596800 + message.repeated_value[4].nanos = 0 + self.assertEqual( + json.loads(json_format.MessageToJson(message, True)), + json.loads('{' + '"value": "1970-01-01T00:00:00Z",' + '"repeatedValue": [' + ' "1970-01-01T00:00:20.000000001Z",' + ' "1970-01-01T00:00:00.000010Z",' + ' "1973-03-03T09:46:40Z",' + ' "9999-12-31T23:59:59.999999999Z",' + 
' "0001-01-01T00:00:00Z"' + ']' + '}')) + parsed_message = json_format_proto3_pb2.TestTimestamp() + self.CheckParseBack(message, parsed_message) + text = (r'{"value": "1970-01-01T00:00:00.01+08:00",' + r'"repeatedValue":[' + r' "1970-01-01T00:00:00.01+08:30",' + r' "1970-01-01T00:00:00.01-01:23"]}') + json_format.Parse(text, parsed_message) + self.assertEqual(parsed_message.value.seconds, -8 * 3600) + self.assertEqual(parsed_message.value.nanos, 10000000) + self.assertEqual(parsed_message.repeated_value[0].seconds, -8.5 * 3600) + self.assertEqual(parsed_message.repeated_value[1].seconds, 3600 + 23 * 60) + + def testDurationMessage(self): + message = json_format_proto3_pb2.TestDuration() + message.value.seconds = 1 + message.repeated_value.add().seconds = 0 + message.repeated_value[0].nanos = 10 + message.repeated_value.add().seconds = -1 + message.repeated_value[1].nanos = -1000 + message.repeated_value.add().seconds = 10 + message.repeated_value[2].nanos = 11000000 + message.repeated_value.add().seconds = -315576000000 + message.repeated_value.add().seconds = 315576000000 + self.assertEqual( + json.loads(json_format.MessageToJson(message, True)), + json.loads('{' + '"value": "1s",' + '"repeatedValue": [' + ' "0.000000010s",' + ' "-1.000001s",' + ' "10.011s",' + ' "-315576000000s",' + ' "315576000000s"' + ']' + '}')) + parsed_message = json_format_proto3_pb2.TestDuration() + self.CheckParseBack(message, parsed_message) + + def testFieldMaskMessage(self): + message = json_format_proto3_pb2.TestFieldMask() + message.value.paths.append('foo.bar') + message.value.paths.append('bar') + self.assertEqual( + json_format.MessageToJson(message, True), + '{\n' + ' "value": "foo.bar,bar"\n' + '}') + parsed_message = json_format_proto3_pb2.TestFieldMask() + self.CheckParseBack(message, parsed_message) + + def testWrapperMessage(self): + message = json_format_proto3_pb2.TestWrapper() + message.bool_value.value = False + message.int32_value.value = 0 + message.string_value.value 
= '' + message.bytes_value.value = b'' + message.repeated_bool_value.add().value = True + message.repeated_bool_value.add().value = False + self.assertEqual( + json.loads(json_format.MessageToJson(message, True)), + json.loads('{\n' + ' "int32Value": 0,' + ' "boolValue": false,' + ' "stringValue": "",' + ' "bytesValue": "",' + ' "repeatedBoolValue": [true, false],' + ' "repeatedInt32Value": [],' + ' "repeatedUint32Value": [],' + ' "repeatedFloatValue": [],' + ' "repeatedDoubleValue": [],' + ' "repeatedBytesValue": [],' + ' "repeatedInt64Value": [],' + ' "repeatedUint64Value": [],' + ' "repeatedStringValue": []' + '}')) + parsed_message = json_format_proto3_pb2.TestWrapper() + self.CheckParseBack(message, parsed_message) + + def testParseNull(self): + message = json_format_proto3_pb2.TestMessage() + message.repeated_int32_value.append(1) + message.repeated_int32_value.append(2) + message.repeated_int32_value.append(3) + parsed_message = json_format_proto3_pb2.TestMessage() + self.FillAllFields(parsed_message) + json_format.Parse('{"int32Value": null, ' + '"int64Value": null, ' + '"uint32Value": null,' + '"uint64Value": null,' + '"floatValue": null,' + '"doubleValue": null,' + '"boolValue": null,' + '"stringValue": null,' + '"bytesValue": null,' + '"messageValue": null,' + '"enumValue": null,' + '"repeatedInt32Value": [1, 2, null, 3],' + '"repeatedInt64Value": null,' + '"repeatedUint32Value": null,' + '"repeatedUint64Value": null,' + '"repeatedFloatValue": null,' + '"repeatedDoubleValue": null,' + '"repeatedBoolValue": null,' + '"repeatedStringValue": null,' + '"repeatedBytesValue": null,' + '"repeatedMessageValue": null,' + '"repeatedEnumValue": null' + '}', + parsed_message) + self.assertEqual(message, parsed_message) + + def testNanFloat(self): + message = json_format_proto3_pb2.TestMessage() + message.float_value = float('nan') + text = '{\n "floatValue": "NaN"\n}' + self.assertEqual(json_format.MessageToJson(message), text) + parsed_message = 
json_format_proto3_pb2.TestMessage() + json_format.Parse(text, parsed_message) + self.assertTrue(math.isnan(parsed_message.float_value)) + + def testParseEmptyText(self): + self.CheckError('', + r'Failed to load JSON: (Expecting value)|(No JSON).') + + def testParseBadEnumValue(self): + self.CheckError( + '{"enumValue": 1}', + 'Enum value must be a string literal with double quotes. ' + 'Type "proto3.EnumType" has no value named 1.') + self.CheckError( + '{"enumValue": "baz"}', + 'Enum value must be a string literal with double quotes. ' + 'Type "proto3.EnumType" has no value named baz.') + + def testParseBadIdentifer(self): + self.CheckError('{int32Value: 1}', + (r'Failed to load JSON: Expecting property name' + r'( enclosed in double quotes)?: line 1')) + self.CheckError('{"unknownName": 1}', + 'Message type "proto3.TestMessage" has no field named ' + '"unknownName".') + + def testDuplicateField(self): + # Duplicate key check is not supported for python2.6 + if sys.version_info < (2, 7): + return + self.CheckError('{"int32Value": 1,\n"int32Value":2}', + 'Failed to load JSON: duplicate key int32Value.') + + def testInvalidBoolValue(self): + self.CheckError('{"boolValue": 1}', + 'Failed to parse boolValue field: ' + 'Expected true or false without quotes.') + self.CheckError('{"boolValue": "true"}', + 'Failed to parse boolValue field: ' + 'Expected true or false without quotes.') + + def testInvalidIntegerValue(self): + message = json_format_proto3_pb2.TestMessage() + text = '{"int32Value": 0x12345}' + self.assertRaises(json_format.ParseError, + json_format.Parse, text, message) + self.CheckError('{"int32Value": 012345}', + (r'Failed to load JSON: Expecting \'?,\'? 
delimiter: ' + r'line 1.')) + self.CheckError('{"int32Value": 1.0}', + 'Failed to parse int32Value field: ' + 'Couldn\'t parse integer: 1.0.') + self.CheckError('{"int32Value": " 1 "}', + 'Failed to parse int32Value field: ' + 'Couldn\'t parse integer: " 1 ".') + self.CheckError('{"int32Value": "1 "}', + 'Failed to parse int32Value field: ' + 'Couldn\'t parse integer: "1 ".') + self.CheckError('{"int32Value": 12345678901234567890}', + 'Failed to parse int32Value field: Value out of range: ' + '12345678901234567890.') + self.CheckError('{"int32Value": 1e5}', + 'Failed to parse int32Value field: ' + 'Couldn\'t parse integer: 100000.0.') + self.CheckError('{"uint32Value": -1}', + 'Failed to parse uint32Value field: ' + 'Value out of range: -1.') + + def testInvalidFloatValue(self): + self.CheckError('{"floatValue": "nan"}', + 'Failed to parse floatValue field: Couldn\'t ' + 'parse float "nan", use "NaN" instead.') + + def testInvalidBytesValue(self): + self.CheckError('{"bytesValue": "AQI"}', + 'Failed to parse bytesValue field: Incorrect padding.') + self.CheckError('{"bytesValue": "AQI*"}', + 'Failed to parse bytesValue field: Incorrect padding.') + + def testInvalidMap(self): + message = json_format_proto3_pb2.TestMap() + text = '{"int32Map": {"null": 2, "2": 3}}' + self.assertRaisesRegexp( + json_format.ParseError, + 'Failed to parse int32Map field: invalid literal', + json_format.Parse, text, message) + text = '{"int32Map": {1: 2, "2": 3}}' + self.assertRaisesRegexp( + json_format.ParseError, + (r'Failed to load JSON: Expecting property name' + r'( enclosed in double quotes)?: line 1'), + json_format.Parse, text, message) + text = '{"boolMap": {"null": 1}}' + self.assertRaisesRegexp( + json_format.ParseError, + 'Failed to parse boolMap field: Expected "true" or "false", not null.', + json_format.Parse, text, message) + if sys.version_info < (2, 7): + return + text = r'{"stringMap": {"a": 3, "\u0061": 2}}' + self.assertRaisesRegexp( + json_format.ParseError, + 
'Failed to load JSON: duplicate key a', + json_format.Parse, text, message) + + def testInvalidTimestamp(self): + message = json_format_proto3_pb2.TestTimestamp() + text = '{"value": "10000-01-01T00:00:00.00Z"}' + self.assertRaisesRegexp( + json_format.ParseError, + 'time data \'10000-01-01T00:00:00\' does not match' + ' format \'%Y-%m-%dT%H:%M:%S\'.', + json_format.Parse, text, message) + text = '{"value": "1970-01-01T00:00:00.0123456789012Z"}' + self.assertRaisesRegexp( + well_known_types.ParseError, + 'nanos 0123456789012 more than 9 fractional digits.', + json_format.Parse, text, message) + text = '{"value": "1972-01-01T01:00:00.01+08"}' + self.assertRaisesRegexp( + well_known_types.ParseError, + (r'Invalid timezone offset value: \+08.'), + json_format.Parse, text, message) + # Time smaller than minimum time. + text = '{"value": "0000-01-01T00:00:00Z"}' + self.assertRaisesRegexp( + json_format.ParseError, + 'Failed to parse value field: year is out of range.', + json_format.Parse, text, message) + # Time bigger than maxinum time. + message.value.seconds = 253402300800 + self.assertRaisesRegexp( + OverflowError, + 'date value out of range', + json_format.MessageToJson, message) + + def testInvalidOneof(self): + message = json_format_proto3_pb2.TestOneof() + text = '{"oneofInt32Value": 1, "oneofStringValue": "2"}' + self.assertRaisesRegexp( + json_format.ParseError, + 'Message type "proto3.TestOneof"' + ' should not have multiple "oneof_value" oneof fields.', + json_format.Parse, text, message) + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/message_factory_test.py b/deps/google/protobuf/internal/message_factory_test.py new file mode 100644 index 00000000..2fbe5ea7 --- /dev/null +++ b/deps/google/protobuf/internal/message_factory_test.py @@ -0,0 +1,135 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Tests for google.protobuf.message_factory.""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf import descriptor_pb2 +from google.protobuf.internal import factory_test1_pb2 +from google.protobuf.internal import factory_test2_pb2 +from google.protobuf import descriptor_database +from google.protobuf import descriptor_pool +from google.protobuf import message_factory + + +class MessageFactoryTest(unittest.TestCase): + + def setUp(self): + self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString( + factory_test1_pb2.DESCRIPTOR.serialized_pb) + self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString( + factory_test2_pb2.DESCRIPTOR.serialized_pb) + + def _ExerciseDynamicClass(self, cls): + msg = cls() + msg.mandatory = 42 + msg.nested_factory_2_enum = 0 + msg.nested_factory_2_message.value = 'nested message value' + msg.factory_1_message.factory_1_enum = 1 + msg.factory_1_message.nested_factory_1_enum = 0 + msg.factory_1_message.nested_factory_1_message.value = ( + 'nested message value') + msg.factory_1_message.scalar_value = 22 + msg.factory_1_message.list_value.extend([u'one', u'two', u'three']) + msg.factory_1_message.list_value.append(u'four') + msg.factory_1_enum = 1 + msg.nested_factory_1_enum = 0 + msg.nested_factory_1_message.value = 'nested message value' + msg.circular_message.mandatory = 1 + msg.circular_message.circular_message.mandatory = 2 + msg.circular_message.scalar_value = 'one deep' + msg.scalar_value = 'zero deep' + msg.list_value.extend([u'four', u'three', u'two']) + msg.list_value.append(u'one') + msg.grouped.add() + msg.grouped[0].part_1 = 'hello' + msg.grouped[0].part_2 = 'world' + msg.grouped.add(part_1='testing', part_2='123') + msg.loop.loop.mandatory = 2 + msg.loop.loop.loop.loop.mandatory = 4 + serialized = msg.SerializeToString() + converted = factory_test2_pb2.Factory2Message.FromString(serialized) + 
reserialized = converted.SerializeToString() + self.assertEqual(serialized, reserialized) + result = cls.FromString(reserialized) + self.assertEqual(msg, result) + + def testGetPrototype(self): + db = descriptor_database.DescriptorDatabase() + pool = descriptor_pool.DescriptorPool(db) + db.Add(self.factory_test1_fd) + db.Add(self.factory_test2_fd) + factory = message_factory.MessageFactory() + cls = factory.GetPrototype(pool.FindMessageTypeByName( + 'google.protobuf.python.internal.Factory2Message')) + self.assertFalse(cls is factory_test2_pb2.Factory2Message) + self._ExerciseDynamicClass(cls) + cls2 = factory.GetPrototype(pool.FindMessageTypeByName( + 'google.protobuf.python.internal.Factory2Message')) + self.assertTrue(cls is cls2) + + def testGetMessages(self): + # performed twice because multiple calls with the same input must be allowed + for _ in range(2): + messages = message_factory.GetMessages([self.factory_test1_fd, + self.factory_test2_fd]) + self.assertTrue( + set(['google.protobuf.python.internal.Factory2Message', + 'google.protobuf.python.internal.Factory1Message'], + ).issubset(set(messages.keys()))) + self._ExerciseDynamicClass( + messages['google.protobuf.python.internal.Factory2Message']) + self.assertTrue( + set(['google.protobuf.python.internal.Factory2Message.one_more_field', + 'google.protobuf.python.internal.another_field'], + ).issubset( + set(messages['google.protobuf.python.internal.Factory1Message'] + ._extensions_by_name.keys()))) + factory_msg1 = messages['google.protobuf.python.internal.Factory1Message'] + msg1 = messages['google.protobuf.python.internal.Factory1Message']() + ext1 = factory_msg1._extensions_by_name[ + 'google.protobuf.python.internal.Factory2Message.one_more_field'] + ext2 = factory_msg1._extensions_by_name[ + 'google.protobuf.python.internal.another_field'] + msg1.Extensions[ext1] = 'test1' + msg1.Extensions[ext2] = 'test2' + self.assertEqual('test1', msg1.Extensions[ext1]) + self.assertEqual('test2', 
msg1.Extensions[ext2]) + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/message_listener.py b/deps/google/protobuf/internal/message_listener.py new file mode 100644 index 00000000..0fc255a7 --- /dev/null +++ b/deps/google/protobuf/internal/message_listener.py @@ -0,0 +1,78 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Defines a listener interface for observing certain +state transitions on Message objects. + +Also defines a null implementation of this interface. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +class MessageListener(object): + + """Listens for modifications made to a message. Meant to be registered via + Message._SetListener(). + + Attributes: + dirty: If True, then calling Modified() would be a no-op. This can be + used to avoid these calls entirely in the common case. + """ + + def Modified(self): + """Called every time the message is modified in such a way that the parent + message may need to be updated. This currently means either: + (a) The message was modified for the first time, so the parent message + should henceforth mark the message as present. + (b) The message's cached byte size became dirty -- i.e. the message was + modified for the first time after a previous call to ByteSize(). + Therefore the parent should also mark its byte size as dirty. + Note that (a) implies (b), since new objects start out with a client cached + size (zero). However, we document (a) explicitly because it is important. + + Modified() will *only* be called in response to one of these two events -- + not every time the sub-message is modified. + + Note that if the listener's |dirty| attribute is true, then calling + Modified at the moment would be a no-op, so it can be skipped. Performance- + sensitive callers should check this attribute directly before calling since + it will be true most of the time. 
+ """ + + raise NotImplementedError + + +class NullMessageListener(object): + + """No-op MessageListener implementation.""" + + def Modified(self): + pass diff --git a/deps/google/protobuf/internal/message_set_extensions_pb2.py b/deps/google/protobuf/internal/message_set_extensions_pb2.py new file mode 100644 index 00000000..4c9b475a --- /dev/null +++ b/deps/google/protobuf/internal/message_set_extensions_pb2.py @@ -0,0 +1,210 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/message_set_extensions.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/message_set_extensions.proto', + package='google.protobuf.internal', + syntax='proto2', + serialized_pb=_b('\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +MESSAGE_SET_EXTENSION3_FIELD_NUMBER = 98418655 +message_set_extension3 = _descriptor.FieldDescriptor( + name='message_set_extension3', full_name='google.protobuf.internal.message_set_extension3', index=0, + number=98418655, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + + +_TESTMESSAGESET = _descriptor.Descriptor( + name='TestMessageSet', + full_name='google.protobuf.internal.TestMessageSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')), + is_extendable=True, + syntax='proto2', + extension_ranges=[(4, 2147483647), ], + oneofs=[ + ], + serialized_start=83, + serialized_end=113, +) + + +_TESTMESSAGESETEXTENSION1 = _descriptor.Descriptor( + name='TestMessageSetExtension1', + full_name='google.protobuf.internal.TestMessageSetExtension1', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='i', full_name='google.protobuf.internal.TestMessageSetExtension1.i', index=0, + number=15, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='message_set_extension', full_name='google.protobuf.internal.TestMessageSetExtension1.message_set_extension', index=0, + number=98418603, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + 
nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=116, + serialized_end=281, +) + + +_TESTMESSAGESETEXTENSION2 = _descriptor.Descriptor( + name='TestMessageSetExtension2', + full_name='google.protobuf.internal.TestMessageSetExtension2', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='str', full_name='google.protobuf.internal.TestMessageSetExtension2.str', index=0, + number=25, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='message_set_extension', full_name='google.protobuf.internal.TestMessageSetExtension2.message_set_extension', index=0, + number=98418634, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=284, + serialized_end=451, +) + + +_TESTMESSAGESETEXTENSION3 = _descriptor.Descriptor( + name='TestMessageSetExtension3', + full_name='google.protobuf.internal.TestMessageSetExtension3', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='text', full_name='google.protobuf.internal.TestMessageSetExtension3.text', index=0, + number=35, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, 
+ is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=453, + serialized_end=493, +) + +DESCRIPTOR.message_types_by_name['TestMessageSet'] = _TESTMESSAGESET +DESCRIPTOR.message_types_by_name['TestMessageSetExtension1'] = _TESTMESSAGESETEXTENSION1 +DESCRIPTOR.message_types_by_name['TestMessageSetExtension2'] = _TESTMESSAGESETEXTENSION2 +DESCRIPTOR.message_types_by_name['TestMessageSetExtension3'] = _TESTMESSAGESETEXTENSION3 +DESCRIPTOR.extensions_by_name['message_set_extension3'] = message_set_extension3 + +TestMessageSet = _reflection.GeneratedProtocolMessageType('TestMessageSet', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESET, + __module__ = 'google.protobuf.internal.message_set_extensions_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSet) + )) +_sym_db.RegisterMessage(TestMessageSet) + +TestMessageSetExtension1 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension1', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESETEXTENSION1, + __module__ = 'google.protobuf.internal.message_set_extensions_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension1) + )) +_sym_db.RegisterMessage(TestMessageSetExtension1) + +TestMessageSetExtension2 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension2', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESETEXTENSION2, + __module__ = 'google.protobuf.internal.message_set_extensions_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension2) + )) +_sym_db.RegisterMessage(TestMessageSetExtension2) + +TestMessageSetExtension3 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension3', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESETEXTENSION3, + __module__ = 'google.protobuf.internal.message_set_extensions_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension3) + )) 
+_sym_db.RegisterMessage(TestMessageSetExtension3) + +message_set_extension3.message_type = _TESTMESSAGESETEXTENSION3 +TestMessageSet.RegisterExtension(message_set_extension3) +_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION1 +TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) +_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION2 +TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) + +_TESTMESSAGESET.has_options = True +_TESTMESSAGESET._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/message_test.py b/deps/google/protobuf/internal/message_test.py new file mode 100644 index 00000000..d03f2d25 --- /dev/null +++ b/deps/google/protobuf/internal/message_test.py @@ -0,0 +1,1780 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Tests python protocol buffers against the golden message. + +Note that the golden messages exercise every known field type, thus this +test ends up exercising and verifying nearly all of the parsing and +serialization code in the whole library. + +TODO(kenton): Merge with wire_format_test? It doesn't make a whole lot of +sense to call this a test of the "message" module, which only declares an +abstract interface. +""" + +__author__ = 'gps@google.com (Gregory P. 
Smith)' + + +import collections +import copy +import math +import operator +import pickle +import six +import sys + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf.internal import _parameterized +from google.protobuf import map_unittest_pb2 +from google.protobuf import unittest_pb2 +from google.protobuf import unittest_proto3_arena_pb2 +from google.protobuf.internal import any_test_pb2 +from google.protobuf.internal import api_implementation +from google.protobuf.internal import packed_field_test_pb2 +from google.protobuf.internal import test_util +from google.protobuf import message + +if six.PY3: + long = int + +# Python pre-2.6 does not have isinf() or isnan() functions, so we have +# to provide our own. +def isnan(val): + # NaN is never equal to itself. + return val != val +def isinf(val): + # Infinity times zero equals NaN. + return not isnan(val) and isnan(val * 0) +def IsPosInf(val): + return isinf(val) and (val > 0) +def IsNegInf(val): + return isinf(val) and (val < 0) + + +@_parameterized.Parameters( + (unittest_pb2), + (unittest_proto3_arena_pb2)) +class MessageTest(unittest.TestCase): + + def testBadUtf8String(self, message_module): + if api_implementation.Type() != 'python': + self.skipTest("Skipping testBadUtf8String, currently only the python " + "api implementation raises UnicodeDecodeError when a " + "string field contains bad utf-8.") + bad_utf8_data = test_util.GoldenFileData('bad_utf8_string') + with self.assertRaises(UnicodeDecodeError) as context: + message_module.TestAllTypes.FromString(bad_utf8_data) + self.assertIn('TestAllTypes.optional_string', str(context.exception)) + + def testGoldenMessage(self, message_module): + # Proto3 doesn't have the "default_foo" members or foreign enums, + # and doesn't preserve unknown fields, so for proto3 we use a golden + # message that doesn't have these fields set. 
+ if message_module is unittest_pb2: + golden_data = test_util.GoldenFileData( + 'golden_message_oneof_implemented') + else: + golden_data = test_util.GoldenFileData('golden_message_proto3') + + golden_message = message_module.TestAllTypes() + golden_message.ParseFromString(golden_data) + if message_module is unittest_pb2: + test_util.ExpectAllFieldsSet(self, golden_message) + self.assertEqual(golden_data, golden_message.SerializeToString()) + golden_copy = copy.deepcopy(golden_message) + self.assertEqual(golden_data, golden_copy.SerializeToString()) + + def testGoldenPackedMessage(self, message_module): + golden_data = test_util.GoldenFileData('golden_packed_fields_message') + golden_message = message_module.TestPackedTypes() + golden_message.ParseFromString(golden_data) + all_set = message_module.TestPackedTypes() + test_util.SetAllPackedFields(all_set) + self.assertEqual(all_set, golden_message) + self.assertEqual(golden_data, all_set.SerializeToString()) + golden_copy = copy.deepcopy(golden_message) + self.assertEqual(golden_data, golden_copy.SerializeToString()) + + def testPickleSupport(self, message_module): + golden_data = test_util.GoldenFileData('golden_message') + golden_message = message_module.TestAllTypes() + golden_message.ParseFromString(golden_data) + pickled_message = pickle.dumps(golden_message) + + unpickled_message = pickle.loads(pickled_message) + self.assertEqual(unpickled_message, golden_message) + + def testPositiveInfinity(self, message_module): + if message_module is unittest_pb2: + golden_data = (b'\x5D\x00\x00\x80\x7F' + b'\x61\x00\x00\x00\x00\x00\x00\xF0\x7F' + b'\xCD\x02\x00\x00\x80\x7F' + b'\xD1\x02\x00\x00\x00\x00\x00\x00\xF0\x7F') + else: + golden_data = (b'\x5D\x00\x00\x80\x7F' + b'\x61\x00\x00\x00\x00\x00\x00\xF0\x7F' + b'\xCA\x02\x04\x00\x00\x80\x7F' + b'\xD2\x02\x08\x00\x00\x00\x00\x00\x00\xF0\x7F') + + golden_message = message_module.TestAllTypes() + golden_message.ParseFromString(golden_data) + 
self.assertTrue(IsPosInf(golden_message.optional_float)) + self.assertTrue(IsPosInf(golden_message.optional_double)) + self.assertTrue(IsPosInf(golden_message.repeated_float[0])) + self.assertTrue(IsPosInf(golden_message.repeated_double[0])) + self.assertEqual(golden_data, golden_message.SerializeToString()) + + def testNegativeInfinity(self, message_module): + if message_module is unittest_pb2: + golden_data = (b'\x5D\x00\x00\x80\xFF' + b'\x61\x00\x00\x00\x00\x00\x00\xF0\xFF' + b'\xCD\x02\x00\x00\x80\xFF' + b'\xD1\x02\x00\x00\x00\x00\x00\x00\xF0\xFF') + else: + golden_data = (b'\x5D\x00\x00\x80\xFF' + b'\x61\x00\x00\x00\x00\x00\x00\xF0\xFF' + b'\xCA\x02\x04\x00\x00\x80\xFF' + b'\xD2\x02\x08\x00\x00\x00\x00\x00\x00\xF0\xFF') + + golden_message = message_module.TestAllTypes() + golden_message.ParseFromString(golden_data) + self.assertTrue(IsNegInf(golden_message.optional_float)) + self.assertTrue(IsNegInf(golden_message.optional_double)) + self.assertTrue(IsNegInf(golden_message.repeated_float[0])) + self.assertTrue(IsNegInf(golden_message.repeated_double[0])) + self.assertEqual(golden_data, golden_message.SerializeToString()) + + def testNotANumber(self, message_module): + golden_data = (b'\x5D\x00\x00\xC0\x7F' + b'\x61\x00\x00\x00\x00\x00\x00\xF8\x7F' + b'\xCD\x02\x00\x00\xC0\x7F' + b'\xD1\x02\x00\x00\x00\x00\x00\x00\xF8\x7F') + golden_message = message_module.TestAllTypes() + golden_message.ParseFromString(golden_data) + self.assertTrue(isnan(golden_message.optional_float)) + self.assertTrue(isnan(golden_message.optional_double)) + self.assertTrue(isnan(golden_message.repeated_float[0])) + self.assertTrue(isnan(golden_message.repeated_double[0])) + + # The protocol buffer may serialize to any one of multiple different + # representations of a NaN. Rather than verify a specific representation, + # verify the serialized string can be converted into a correctly + # behaving protocol buffer. 
+ serialized = golden_message.SerializeToString() + message = message_module.TestAllTypes() + message.ParseFromString(serialized) + self.assertTrue(isnan(message.optional_float)) + self.assertTrue(isnan(message.optional_double)) + self.assertTrue(isnan(message.repeated_float[0])) + self.assertTrue(isnan(message.repeated_double[0])) + + def testPositiveInfinityPacked(self, message_module): + golden_data = (b'\xA2\x06\x04\x00\x00\x80\x7F' + b'\xAA\x06\x08\x00\x00\x00\x00\x00\x00\xF0\x7F') + golden_message = message_module.TestPackedTypes() + golden_message.ParseFromString(golden_data) + self.assertTrue(IsPosInf(golden_message.packed_float[0])) + self.assertTrue(IsPosInf(golden_message.packed_double[0])) + self.assertEqual(golden_data, golden_message.SerializeToString()) + + def testNegativeInfinityPacked(self, message_module): + golden_data = (b'\xA2\x06\x04\x00\x00\x80\xFF' + b'\xAA\x06\x08\x00\x00\x00\x00\x00\x00\xF0\xFF') + golden_message = message_module.TestPackedTypes() + golden_message.ParseFromString(golden_data) + self.assertTrue(IsNegInf(golden_message.packed_float[0])) + self.assertTrue(IsNegInf(golden_message.packed_double[0])) + self.assertEqual(golden_data, golden_message.SerializeToString()) + + def testNotANumberPacked(self, message_module): + golden_data = (b'\xA2\x06\x04\x00\x00\xC0\x7F' + b'\xAA\x06\x08\x00\x00\x00\x00\x00\x00\xF8\x7F') + golden_message = message_module.TestPackedTypes() + golden_message.ParseFromString(golden_data) + self.assertTrue(isnan(golden_message.packed_float[0])) + self.assertTrue(isnan(golden_message.packed_double[0])) + + serialized = golden_message.SerializeToString() + message = message_module.TestPackedTypes() + message.ParseFromString(serialized) + self.assertTrue(isnan(message.packed_float[0])) + self.assertTrue(isnan(message.packed_double[0])) + + def testExtremeFloatValues(self, message_module): + message = message_module.TestAllTypes() + + # Most positive exponent, no significand bits set. 
+ kMostPosExponentNoSigBits = math.pow(2, 127) + message.optional_float = kMostPosExponentNoSigBits + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_float == kMostPosExponentNoSigBits) + + # Most positive exponent, one significand bit set. + kMostPosExponentOneSigBit = 1.5 * math.pow(2, 127) + message.optional_float = kMostPosExponentOneSigBit + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_float == kMostPosExponentOneSigBit) + + # Repeat last two cases with values of same magnitude, but negative. + message.optional_float = -kMostPosExponentNoSigBits + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_float == -kMostPosExponentNoSigBits) + + message.optional_float = -kMostPosExponentOneSigBit + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_float == -kMostPosExponentOneSigBit) + + # Most negative exponent, no significand bits set. + kMostNegExponentNoSigBits = math.pow(2, -127) + message.optional_float = kMostNegExponentNoSigBits + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_float == kMostNegExponentNoSigBits) + + # Most negative exponent, one significand bit set. + kMostNegExponentOneSigBit = 1.5 * math.pow(2, -127) + message.optional_float = kMostNegExponentOneSigBit + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_float == kMostNegExponentOneSigBit) + + # Repeat last two cases with values of the same magnitude, but negative. 
+ message.optional_float = -kMostNegExponentNoSigBits + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_float == -kMostNegExponentNoSigBits) + + message.optional_float = -kMostNegExponentOneSigBit + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_float == -kMostNegExponentOneSigBit) + + def testExtremeDoubleValues(self, message_module): + message = message_module.TestAllTypes() + + # Most positive exponent, no significand bits set. + kMostPosExponentNoSigBits = math.pow(2, 1023) + message.optional_double = kMostPosExponentNoSigBits + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_double == kMostPosExponentNoSigBits) + + # Most positive exponent, one significand bit set. + kMostPosExponentOneSigBit = 1.5 * math.pow(2, 1023) + message.optional_double = kMostPosExponentOneSigBit + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_double == kMostPosExponentOneSigBit) + + # Repeat last two cases with values of same magnitude, but negative. + message.optional_double = -kMostPosExponentNoSigBits + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_double == -kMostPosExponentNoSigBits) + + message.optional_double = -kMostPosExponentOneSigBit + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_double == -kMostPosExponentOneSigBit) + + # Most negative exponent, no significand bits set. + kMostNegExponentNoSigBits = math.pow(2, -1023) + message.optional_double = kMostNegExponentNoSigBits + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_double == kMostNegExponentNoSigBits) + + # Most negative exponent, one significand bit set. 
+ kMostNegExponentOneSigBit = 1.5 * math.pow(2, -1023) + message.optional_double = kMostNegExponentOneSigBit + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_double == kMostNegExponentOneSigBit) + + # Repeat last two cases with values of the same magnitude, but negative. + message.optional_double = -kMostNegExponentNoSigBits + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_double == -kMostNegExponentNoSigBits) + + message.optional_double = -kMostNegExponentOneSigBit + message.ParseFromString(message.SerializeToString()) + self.assertTrue(message.optional_double == -kMostNegExponentOneSigBit) + + def testFloatPrinting(self, message_module): + message = message_module.TestAllTypes() + message.optional_float = 2.0 + self.assertEqual(str(message), 'optional_float: 2.0\n') + + def testHighPrecisionFloatPrinting(self, message_module): + message = message_module.TestAllTypes() + message.optional_double = 0.12345678912345678 + if sys.version_info >= (3,): + self.assertEqual(str(message), 'optional_double: 0.12345678912345678\n') + else: + self.assertEqual(str(message), 'optional_double: 0.123456789123\n') + + def testUnknownFieldPrinting(self, message_module): + populated = message_module.TestAllTypes() + test_util.SetAllNonLazyFields(populated) + empty = message_module.TestEmptyMessage() + empty.ParseFromString(populated.SerializeToString()) + self.assertEqual(str(empty), '') + + def testRepeatedNestedFieldIteration(self, message_module): + msg = message_module.TestAllTypes() + msg.repeated_nested_message.add(bb=1) + msg.repeated_nested_message.add(bb=2) + msg.repeated_nested_message.add(bb=3) + msg.repeated_nested_message.add(bb=4) + + self.assertEqual([1, 2, 3, 4], + [m.bb for m in msg.repeated_nested_message]) + self.assertEqual([4, 3, 2, 1], + [m.bb for m in reversed(msg.repeated_nested_message)]) + self.assertEqual([4, 3, 2, 1], + [m.bb for m in msg.repeated_nested_message[::-1]]) + + 
def testSortingRepeatedScalarFieldsDefaultComparator(self, message_module): + """Check some different types with the default comparator.""" + message = message_module.TestAllTypes() + + # TODO(mattp): would testing more scalar types strengthen test? + message.repeated_int32.append(1) + message.repeated_int32.append(3) + message.repeated_int32.append(2) + message.repeated_int32.sort() + self.assertEqual(message.repeated_int32[0], 1) + self.assertEqual(message.repeated_int32[1], 2) + self.assertEqual(message.repeated_int32[2], 3) + + message.repeated_float.append(1.1) + message.repeated_float.append(1.3) + message.repeated_float.append(1.2) + message.repeated_float.sort() + self.assertAlmostEqual(message.repeated_float[0], 1.1) + self.assertAlmostEqual(message.repeated_float[1], 1.2) + self.assertAlmostEqual(message.repeated_float[2], 1.3) + + message.repeated_string.append('a') + message.repeated_string.append('c') + message.repeated_string.append('b') + message.repeated_string.sort() + self.assertEqual(message.repeated_string[0], 'a') + self.assertEqual(message.repeated_string[1], 'b') + self.assertEqual(message.repeated_string[2], 'c') + + message.repeated_bytes.append(b'a') + message.repeated_bytes.append(b'c') + message.repeated_bytes.append(b'b') + message.repeated_bytes.sort() + self.assertEqual(message.repeated_bytes[0], b'a') + self.assertEqual(message.repeated_bytes[1], b'b') + self.assertEqual(message.repeated_bytes[2], b'c') + + def testSortingRepeatedScalarFieldsCustomComparator(self, message_module): + """Check some different types with custom comparator.""" + message = message_module.TestAllTypes() + + message.repeated_int32.append(-3) + message.repeated_int32.append(-2) + message.repeated_int32.append(-1) + message.repeated_int32.sort(key=abs) + self.assertEqual(message.repeated_int32[0], -1) + self.assertEqual(message.repeated_int32[1], -2) + self.assertEqual(message.repeated_int32[2], -3) + + message.repeated_string.append('aaa') + 
message.repeated_string.append('bb') + message.repeated_string.append('c') + message.repeated_string.sort(key=len) + self.assertEqual(message.repeated_string[0], 'c') + self.assertEqual(message.repeated_string[1], 'bb') + self.assertEqual(message.repeated_string[2], 'aaa') + + def testSortingRepeatedCompositeFieldsCustomComparator(self, message_module): + """Check passing a custom comparator to sort a repeated composite field.""" + message = message_module.TestAllTypes() + + message.repeated_nested_message.add().bb = 1 + message.repeated_nested_message.add().bb = 3 + message.repeated_nested_message.add().bb = 2 + message.repeated_nested_message.add().bb = 6 + message.repeated_nested_message.add().bb = 5 + message.repeated_nested_message.add().bb = 4 + message.repeated_nested_message.sort(key=operator.attrgetter('bb')) + self.assertEqual(message.repeated_nested_message[0].bb, 1) + self.assertEqual(message.repeated_nested_message[1].bb, 2) + self.assertEqual(message.repeated_nested_message[2].bb, 3) + self.assertEqual(message.repeated_nested_message[3].bb, 4) + self.assertEqual(message.repeated_nested_message[4].bb, 5) + self.assertEqual(message.repeated_nested_message[5].bb, 6) + + def testSortingRepeatedCompositeFieldsStable(self, message_module): + """Check passing a custom comparator to sort a repeated composite field.""" + message = message_module.TestAllTypes() + + message.repeated_nested_message.add().bb = 21 + message.repeated_nested_message.add().bb = 20 + message.repeated_nested_message.add().bb = 13 + message.repeated_nested_message.add().bb = 33 + message.repeated_nested_message.add().bb = 11 + message.repeated_nested_message.add().bb = 24 + message.repeated_nested_message.add().bb = 10 + message.repeated_nested_message.sort(key=lambda z: z.bb // 10) + self.assertEqual( + [13, 11, 10, 21, 20, 24, 33], + [n.bb for n in message.repeated_nested_message]) + + # Make sure that for the C++ implementation, the underlying fields + # are actually reordered. 
+ pb = message.SerializeToString() + message.Clear() + message.MergeFromString(pb) + self.assertEqual( + [13, 11, 10, 21, 20, 24, 33], + [n.bb for n in message.repeated_nested_message]) + + def testRepeatedCompositeFieldSortArguments(self, message_module): + """Check sorting a repeated composite field using list.sort() arguments.""" + message = message_module.TestAllTypes() + + get_bb = operator.attrgetter('bb') + cmp_bb = lambda a, b: cmp(a.bb, b.bb) + message.repeated_nested_message.add().bb = 1 + message.repeated_nested_message.add().bb = 3 + message.repeated_nested_message.add().bb = 2 + message.repeated_nested_message.add().bb = 6 + message.repeated_nested_message.add().bb = 5 + message.repeated_nested_message.add().bb = 4 + message.repeated_nested_message.sort(key=get_bb) + self.assertEqual([k.bb for k in message.repeated_nested_message], + [1, 2, 3, 4, 5, 6]) + message.repeated_nested_message.sort(key=get_bb, reverse=True) + self.assertEqual([k.bb for k in message.repeated_nested_message], + [6, 5, 4, 3, 2, 1]) + if sys.version_info >= (3,): return # No cmp sorting in PY3. + message.repeated_nested_message.sort(sort_function=cmp_bb) + self.assertEqual([k.bb for k in message.repeated_nested_message], + [1, 2, 3, 4, 5, 6]) + message.repeated_nested_message.sort(cmp=cmp_bb, reverse=True) + self.assertEqual([k.bb for k in message.repeated_nested_message], + [6, 5, 4, 3, 2, 1]) + + def testRepeatedScalarFieldSortArguments(self, message_module): + """Check sorting a scalar field using list.sort() arguments.""" + message = message_module.TestAllTypes() + + message.repeated_int32.append(-3) + message.repeated_int32.append(-2) + message.repeated_int32.append(-1) + message.repeated_int32.sort(key=abs) + self.assertEqual(list(message.repeated_int32), [-1, -2, -3]) + message.repeated_int32.sort(key=abs, reverse=True) + self.assertEqual(list(message.repeated_int32), [-3, -2, -1]) + if sys.version_info < (3,): # No cmp sorting in PY3. 
+ abs_cmp = lambda a, b: cmp(abs(a), abs(b)) + message.repeated_int32.sort(sort_function=abs_cmp) + self.assertEqual(list(message.repeated_int32), [-1, -2, -3]) + message.repeated_int32.sort(cmp=abs_cmp, reverse=True) + self.assertEqual(list(message.repeated_int32), [-3, -2, -1]) + + message.repeated_string.append('aaa') + message.repeated_string.append('bb') + message.repeated_string.append('c') + message.repeated_string.sort(key=len) + self.assertEqual(list(message.repeated_string), ['c', 'bb', 'aaa']) + message.repeated_string.sort(key=len, reverse=True) + self.assertEqual(list(message.repeated_string), ['aaa', 'bb', 'c']) + if sys.version_info < (3,): # No cmp sorting in PY3. + len_cmp = lambda a, b: cmp(len(a), len(b)) + message.repeated_string.sort(sort_function=len_cmp) + self.assertEqual(list(message.repeated_string), ['c', 'bb', 'aaa']) + message.repeated_string.sort(cmp=len_cmp, reverse=True) + self.assertEqual(list(message.repeated_string), ['aaa', 'bb', 'c']) + + def testRepeatedFieldsComparable(self, message_module): + m1 = message_module.TestAllTypes() + m2 = message_module.TestAllTypes() + m1.repeated_int32.append(0) + m1.repeated_int32.append(1) + m1.repeated_int32.append(2) + m2.repeated_int32.append(0) + m2.repeated_int32.append(1) + m2.repeated_int32.append(2) + m1.repeated_nested_message.add().bb = 1 + m1.repeated_nested_message.add().bb = 2 + m1.repeated_nested_message.add().bb = 3 + m2.repeated_nested_message.add().bb = 1 + m2.repeated_nested_message.add().bb = 2 + m2.repeated_nested_message.add().bb = 3 + + if sys.version_info >= (3,): return # No cmp() in PY3. + + # These comparisons should not raise errors. + _ = m1 < m2 + _ = m1.repeated_nested_message < m2.repeated_nested_message + + # Make sure cmp always works. If it wasn't defined, these would be + # id() comparisons and would all fail. 
+ self.assertEqual(cmp(m1, m2), 0) + self.assertEqual(cmp(m1.repeated_int32, m2.repeated_int32), 0) + self.assertEqual(cmp(m1.repeated_int32, [0, 1, 2]), 0) + self.assertEqual(cmp(m1.repeated_nested_message, + m2.repeated_nested_message), 0) + with self.assertRaises(TypeError): + # Can't compare repeated composite containers to lists. + cmp(m1.repeated_nested_message, m2.repeated_nested_message[:]) + + # TODO(anuraag): Implement extensiondict comparison in C++ and then add test + + def testRepeatedFieldsAreSequences(self, message_module): + m = message_module.TestAllTypes() + self.assertIsInstance(m.repeated_int32, collections.MutableSequence) + self.assertIsInstance(m.repeated_nested_message, + collections.MutableSequence) + + def ensureNestedMessageExists(self, msg, attribute): + """Make sure that a nested message object exists. + + As soon as a nested message attribute is accessed, it will be present in the + _fields dict, without being marked as actually being set. + """ + getattr(msg, attribute) + self.assertFalse(msg.HasField(attribute)) + + def testOneofGetCaseNonexistingField(self, message_module): + m = message_module.TestAllTypes() + self.assertRaises(ValueError, m.WhichOneof, 'no_such_oneof_field') + + def testOneofDefaultValues(self, message_module): + m = message_module.TestAllTypes() + self.assertIs(None, m.WhichOneof('oneof_field')) + self.assertFalse(m.HasField('oneof_uint32')) + + # Oneof is set even when setting it to a default value. 
+ m.oneof_uint32 = 0 + self.assertEqual('oneof_uint32', m.WhichOneof('oneof_field')) + self.assertTrue(m.HasField('oneof_uint32')) + self.assertFalse(m.HasField('oneof_string')) + + m.oneof_string = "" + self.assertEqual('oneof_string', m.WhichOneof('oneof_field')) + self.assertTrue(m.HasField('oneof_string')) + self.assertFalse(m.HasField('oneof_uint32')) + + def testOneofSemantics(self, message_module): + m = message_module.TestAllTypes() + self.assertIs(None, m.WhichOneof('oneof_field')) + + m.oneof_uint32 = 11 + self.assertEqual('oneof_uint32', m.WhichOneof('oneof_field')) + self.assertTrue(m.HasField('oneof_uint32')) + + m.oneof_string = u'foo' + self.assertEqual('oneof_string', m.WhichOneof('oneof_field')) + self.assertFalse(m.HasField('oneof_uint32')) + self.assertTrue(m.HasField('oneof_string')) + + # Read nested message accessor without accessing submessage. + m.oneof_nested_message + self.assertEqual('oneof_string', m.WhichOneof('oneof_field')) + self.assertTrue(m.HasField('oneof_string')) + self.assertFalse(m.HasField('oneof_nested_message')) + + # Read accessor of nested message without accessing submessage. 
+ m.oneof_nested_message.bb + self.assertEqual('oneof_string', m.WhichOneof('oneof_field')) + self.assertTrue(m.HasField('oneof_string')) + self.assertFalse(m.HasField('oneof_nested_message')) + + m.oneof_nested_message.bb = 11 + self.assertEqual('oneof_nested_message', m.WhichOneof('oneof_field')) + self.assertFalse(m.HasField('oneof_string')) + self.assertTrue(m.HasField('oneof_nested_message')) + + m.oneof_bytes = b'bb' + self.assertEqual('oneof_bytes', m.WhichOneof('oneof_field')) + self.assertFalse(m.HasField('oneof_nested_message')) + self.assertTrue(m.HasField('oneof_bytes')) + + def testOneofCompositeFieldReadAccess(self, message_module): + m = message_module.TestAllTypes() + m.oneof_uint32 = 11 + + self.ensureNestedMessageExists(m, 'oneof_nested_message') + self.assertEqual('oneof_uint32', m.WhichOneof('oneof_field')) + self.assertEqual(11, m.oneof_uint32) + + def testOneofWhichOneof(self, message_module): + m = message_module.TestAllTypes() + self.assertIs(None, m.WhichOneof('oneof_field')) + if message_module is unittest_pb2: + self.assertFalse(m.HasField('oneof_field')) + + m.oneof_uint32 = 11 + self.assertEqual('oneof_uint32', m.WhichOneof('oneof_field')) + if message_module is unittest_pb2: + self.assertTrue(m.HasField('oneof_field')) + + m.oneof_bytes = b'bb' + self.assertEqual('oneof_bytes', m.WhichOneof('oneof_field')) + + m.ClearField('oneof_bytes') + self.assertIs(None, m.WhichOneof('oneof_field')) + if message_module is unittest_pb2: + self.assertFalse(m.HasField('oneof_field')) + + def testOneofClearField(self, message_module): + m = message_module.TestAllTypes() + m.oneof_uint32 = 11 + m.ClearField('oneof_field') + if message_module is unittest_pb2: + self.assertFalse(m.HasField('oneof_field')) + self.assertFalse(m.HasField('oneof_uint32')) + self.assertIs(None, m.WhichOneof('oneof_field')) + + def testOneofClearSetField(self, message_module): + m = message_module.TestAllTypes() + m.oneof_uint32 = 11 + m.ClearField('oneof_uint32') + if 
message_module is unittest_pb2: + self.assertFalse(m.HasField('oneof_field')) + self.assertFalse(m.HasField('oneof_uint32')) + self.assertIs(None, m.WhichOneof('oneof_field')) + + def testOneofClearUnsetField(self, message_module): + m = message_module.TestAllTypes() + m.oneof_uint32 = 11 + self.ensureNestedMessageExists(m, 'oneof_nested_message') + m.ClearField('oneof_nested_message') + self.assertEqual(11, m.oneof_uint32) + if message_module is unittest_pb2: + self.assertTrue(m.HasField('oneof_field')) + self.assertTrue(m.HasField('oneof_uint32')) + self.assertEqual('oneof_uint32', m.WhichOneof('oneof_field')) + + def testOneofDeserialize(self, message_module): + m = message_module.TestAllTypes() + m.oneof_uint32 = 11 + m2 = message_module.TestAllTypes() + m2.ParseFromString(m.SerializeToString()) + self.assertEqual('oneof_uint32', m2.WhichOneof('oneof_field')) + + def testOneofCopyFrom(self, message_module): + m = message_module.TestAllTypes() + m.oneof_uint32 = 11 + m2 = message_module.TestAllTypes() + m2.CopyFrom(m) + self.assertEqual('oneof_uint32', m2.WhichOneof('oneof_field')) + + def testOneofNestedMergeFrom(self, message_module): + m = message_module.NestedTestAllTypes() + m.payload.oneof_uint32 = 11 + m2 = message_module.NestedTestAllTypes() + m2.payload.oneof_bytes = b'bb' + m2.child.payload.oneof_bytes = b'bb' + m2.MergeFrom(m) + self.assertEqual('oneof_uint32', m2.payload.WhichOneof('oneof_field')) + self.assertEqual('oneof_bytes', m2.child.payload.WhichOneof('oneof_field')) + + def testOneofMessageMergeFrom(self, message_module): + m = message_module.NestedTestAllTypes() + m.payload.oneof_nested_message.bb = 11 + m.child.payload.oneof_nested_message.bb = 12 + m2 = message_module.NestedTestAllTypes() + m2.payload.oneof_uint32 = 13 + m2.MergeFrom(m) + self.assertEqual('oneof_nested_message', + m2.payload.WhichOneof('oneof_field')) + self.assertEqual('oneof_nested_message', + m2.child.payload.WhichOneof('oneof_field')) + + def 
testOneofNestedMessageInit(self, message_module): + m = message_module.TestAllTypes( + oneof_nested_message=message_module.TestAllTypes.NestedMessage()) + self.assertEqual('oneof_nested_message', m.WhichOneof('oneof_field')) + + def testOneofClear(self, message_module): + m = message_module.TestAllTypes() + m.oneof_uint32 = 11 + m.Clear() + self.assertIsNone(m.WhichOneof('oneof_field')) + m.oneof_bytes = b'bb' + self.assertEqual('oneof_bytes', m.WhichOneof('oneof_field')) + + def testAssignByteStringToUnicodeField(self, message_module): + """Assigning a byte string to a string field should result + in the value being converted to a Unicode string.""" + m = message_module.TestAllTypes() + m.optional_string = str('') + self.assertIsInstance(m.optional_string, six.text_type) + + def testLongValuedSlice(self, message_module): + """It should be possible to use long-valued indicies in slices + + This didn't used to work in the v2 C++ implementation. + """ + m = message_module.TestAllTypes() + + # Repeated scalar + m.repeated_int32.append(1) + sl = m.repeated_int32[long(0):long(len(m.repeated_int32))] + self.assertEqual(len(m.repeated_int32), len(sl)) + + # Repeated composite + m.repeated_nested_message.add().bb = 3 + sl = m.repeated_nested_message[long(0):long(len(m.repeated_nested_message))] + self.assertEqual(len(m.repeated_nested_message), len(sl)) + + def testExtendShouldNotSwallowExceptions(self, message_module): + """This didn't use to work in the v2 C++ implementation.""" + m = message_module.TestAllTypes() + with self.assertRaises(NameError) as _: + m.repeated_int32.extend(a for i in range(10)) # pylint: disable=undefined-variable + with self.assertRaises(NameError) as _: + m.repeated_nested_enum.extend( + a for i in range(10)) # pylint: disable=undefined-variable + + FALSY_VALUES = [None, False, 0, 0.0, b'', u'', bytearray(), [], {}, set()] + + def testExtendInt32WithNothing(self, message_module): + """Test no-ops extending repeated int32 fields.""" + m = 
message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_int32) + + # TODO(ptucker): Deprecate this behavior. b/18413862 + for falsy_value in MessageTest.FALSY_VALUES: + m.repeated_int32.extend(falsy_value) + self.assertSequenceEqual([], m.repeated_int32) + + m.repeated_int32.extend([]) + self.assertSequenceEqual([], m.repeated_int32) + + def testExtendFloatWithNothing(self, message_module): + """Test no-ops extending repeated float fields.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_float) + + # TODO(ptucker): Deprecate this behavior. b/18413862 + for falsy_value in MessageTest.FALSY_VALUES: + m.repeated_float.extend(falsy_value) + self.assertSequenceEqual([], m.repeated_float) + + m.repeated_float.extend([]) + self.assertSequenceEqual([], m.repeated_float) + + def testExtendStringWithNothing(self, message_module): + """Test no-ops extending repeated string fields.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_string) + + # TODO(ptucker): Deprecate this behavior. 
b/18413862 + for falsy_value in MessageTest.FALSY_VALUES: + m.repeated_string.extend(falsy_value) + self.assertSequenceEqual([], m.repeated_string) + + m.repeated_string.extend([]) + self.assertSequenceEqual([], m.repeated_string) + + def testExtendInt32WithPythonList(self, message_module): + """Test extending repeated int32 fields with python lists.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_int32) + m.repeated_int32.extend([0]) + self.assertSequenceEqual([0], m.repeated_int32) + m.repeated_int32.extend([1, 2]) + self.assertSequenceEqual([0, 1, 2], m.repeated_int32) + m.repeated_int32.extend([3, 4]) + self.assertSequenceEqual([0, 1, 2, 3, 4], m.repeated_int32) + + def testExtendFloatWithPythonList(self, message_module): + """Test extending repeated float fields with python lists.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_float) + m.repeated_float.extend([0.0]) + self.assertSequenceEqual([0.0], m.repeated_float) + m.repeated_float.extend([1.0, 2.0]) + self.assertSequenceEqual([0.0, 1.0, 2.0], m.repeated_float) + m.repeated_float.extend([3.0, 4.0]) + self.assertSequenceEqual([0.0, 1.0, 2.0, 3.0, 4.0], m.repeated_float) + + def testExtendStringWithPythonList(self, message_module): + """Test extending repeated string fields with python lists.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_string) + m.repeated_string.extend(['']) + self.assertSequenceEqual([''], m.repeated_string) + m.repeated_string.extend(['11', '22']) + self.assertSequenceEqual(['', '11', '22'], m.repeated_string) + m.repeated_string.extend(['33', '44']) + self.assertSequenceEqual(['', '11', '22', '33', '44'], m.repeated_string) + + def testExtendStringWithString(self, message_module): + """Test extending repeated string fields with characters from a string.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_string) + m.repeated_string.extend('abc') + 
self.assertSequenceEqual(['a', 'b', 'c'], m.repeated_string) + + class TestIterable(object): + """This iterable object mimics the behavior of numpy.array. + + __nonzero__ fails for length > 1, and returns bool(item[0]) for length == 1. + + """ + + def __init__(self, values=None): + self._list = values or [] + + def __nonzero__(self): + size = len(self._list) + if size == 0: + return False + if size == 1: + return bool(self._list[0]) + raise ValueError('Truth value is ambiguous.') + + def __len__(self): + return len(self._list) + + def __iter__(self): + return self._list.__iter__() + + def testExtendInt32WithIterable(self, message_module): + """Test extending repeated int32 fields with iterable.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_int32) + m.repeated_int32.extend(MessageTest.TestIterable([])) + self.assertSequenceEqual([], m.repeated_int32) + m.repeated_int32.extend(MessageTest.TestIterable([0])) + self.assertSequenceEqual([0], m.repeated_int32) + m.repeated_int32.extend(MessageTest.TestIterable([1, 2])) + self.assertSequenceEqual([0, 1, 2], m.repeated_int32) + m.repeated_int32.extend(MessageTest.TestIterable([3, 4])) + self.assertSequenceEqual([0, 1, 2, 3, 4], m.repeated_int32) + + def testExtendFloatWithIterable(self, message_module): + """Test extending repeated float fields with iterable.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_float) + m.repeated_float.extend(MessageTest.TestIterable([])) + self.assertSequenceEqual([], m.repeated_float) + m.repeated_float.extend(MessageTest.TestIterable([0.0])) + self.assertSequenceEqual([0.0], m.repeated_float) + m.repeated_float.extend(MessageTest.TestIterable([1.0, 2.0])) + self.assertSequenceEqual([0.0, 1.0, 2.0], m.repeated_float) + m.repeated_float.extend(MessageTest.TestIterable([3.0, 4.0])) + self.assertSequenceEqual([0.0, 1.0, 2.0, 3.0, 4.0], m.repeated_float) + + def testExtendStringWithIterable(self, message_module): + """Test 
extending repeated string fields with iterable.""" + m = message_module.TestAllTypes() + self.assertSequenceEqual([], m.repeated_string) + m.repeated_string.extend(MessageTest.TestIterable([])) + self.assertSequenceEqual([], m.repeated_string) + m.repeated_string.extend(MessageTest.TestIterable([''])) + self.assertSequenceEqual([''], m.repeated_string) + m.repeated_string.extend(MessageTest.TestIterable(['1', '2'])) + self.assertSequenceEqual(['', '1', '2'], m.repeated_string) + m.repeated_string.extend(MessageTest.TestIterable(['3', '4'])) + self.assertSequenceEqual(['', '1', '2', '3', '4'], m.repeated_string) + + def testPickleRepeatedScalarContainer(self, message_module): + # TODO(tibell): The pure-Python implementation support pickling of + # scalar containers in *some* cases. For now the cpp2 version + # throws an exception to avoid a segfault. Investigate if we + # want to support pickling of these fields. + # + # For more information see: https://b2.corp.google.com/u/0/issues/18677897 + if (api_implementation.Type() != 'cpp' or + api_implementation.Version() == 2): + return + m = message_module.TestAllTypes() + with self.assertRaises(pickle.PickleError) as _: + pickle.dumps(m.repeated_int32, pickle.HIGHEST_PROTOCOL) + + def testSortEmptyRepeatedCompositeContainer(self, message_module): + """Exercise a scenario that has led to segfaults in the past. + """ + m = message_module.TestAllTypes() + m.repeated_nested_message.sort() + + def testHasFieldOnRepeatedField(self, message_module): + """Using HasField on a repeated field should raise an exception. 
+ """ + m = message_module.TestAllTypes() + with self.assertRaises(ValueError) as _: + m.HasField('repeated_int32') + + def testRepeatedScalarFieldPop(self, message_module): + m = message_module.TestAllTypes() + with self.assertRaises(IndexError) as _: + m.repeated_int32.pop() + m.repeated_int32.extend(range(5)) + self.assertEqual(4, m.repeated_int32.pop()) + self.assertEqual(0, m.repeated_int32.pop(0)) + self.assertEqual(2, m.repeated_int32.pop(1)) + self.assertEqual([1, 3], m.repeated_int32) + + def testRepeatedCompositeFieldPop(self, message_module): + m = message_module.TestAllTypes() + with self.assertRaises(IndexError) as _: + m.repeated_nested_message.pop() + for i in range(5): + n = m.repeated_nested_message.add() + n.bb = i + self.assertEqual(4, m.repeated_nested_message.pop().bb) + self.assertEqual(0, m.repeated_nested_message.pop(0).bb) + self.assertEqual(2, m.repeated_nested_message.pop(1).bb) + self.assertEqual([1, 3], [n.bb for n in m.repeated_nested_message]) + + +# Class to test proto2-only features (required, extensions, etc.) +class Proto2Test(unittest.TestCase): + + def testFieldPresence(self): + message = unittest_pb2.TestAllTypes() + + self.assertFalse(message.HasField("optional_int32")) + self.assertFalse(message.HasField("optional_bool")) + self.assertFalse(message.HasField("optional_nested_message")) + + with self.assertRaises(ValueError): + message.HasField("field_doesnt_exist") + + with self.assertRaises(ValueError): + message.HasField("repeated_int32") + with self.assertRaises(ValueError): + message.HasField("repeated_nested_message") + + self.assertEqual(0, message.optional_int32) + self.assertEqual(False, message.optional_bool) + self.assertEqual(0, message.optional_nested_message.bb) + + # Fields are set even when setting the values to default values. 
+ message.optional_int32 = 0 + message.optional_bool = False + message.optional_nested_message.bb = 0 + self.assertTrue(message.HasField("optional_int32")) + self.assertTrue(message.HasField("optional_bool")) + self.assertTrue(message.HasField("optional_nested_message")) + + # Set the fields to non-default values. + message.optional_int32 = 5 + message.optional_bool = True + message.optional_nested_message.bb = 15 + + self.assertTrue(message.HasField("optional_int32")) + self.assertTrue(message.HasField("optional_bool")) + self.assertTrue(message.HasField("optional_nested_message")) + + # Clearing the fields unsets them and resets their value to default. + message.ClearField("optional_int32") + message.ClearField("optional_bool") + message.ClearField("optional_nested_message") + + self.assertFalse(message.HasField("optional_int32")) + self.assertFalse(message.HasField("optional_bool")) + self.assertFalse(message.HasField("optional_nested_message")) + self.assertEqual(0, message.optional_int32) + self.assertEqual(False, message.optional_bool) + self.assertEqual(0, message.optional_nested_message.bb) + + # TODO(tibell): The C++ implementations actually allows assignment + # of unknown enum values to *scalar* fields (but not repeated + # fields). Once checked enum fields becomes the default in the + # Python implementation, the C++ implementation should follow suit. 
+ def testAssignInvalidEnum(self): + """It should not be possible to assign an invalid enum number to an + enum field.""" + m = unittest_pb2.TestAllTypes() + + with self.assertRaises(ValueError) as _: + m.optional_nested_enum = 1234567 + self.assertRaises(ValueError, m.repeated_nested_enum.append, 1234567) + + def testGoldenExtensions(self): + golden_data = test_util.GoldenFileData('golden_message') + golden_message = unittest_pb2.TestAllExtensions() + golden_message.ParseFromString(golden_data) + all_set = unittest_pb2.TestAllExtensions() + test_util.SetAllExtensions(all_set) + self.assertEqual(all_set, golden_message) + self.assertEqual(golden_data, golden_message.SerializeToString()) + golden_copy = copy.deepcopy(golden_message) + self.assertEqual(golden_data, golden_copy.SerializeToString()) + + def testGoldenPackedExtensions(self): + golden_data = test_util.GoldenFileData('golden_packed_fields_message') + golden_message = unittest_pb2.TestPackedExtensions() + golden_message.ParseFromString(golden_data) + all_set = unittest_pb2.TestPackedExtensions() + test_util.SetAllPackedExtensions(all_set) + self.assertEqual(all_set, golden_message) + self.assertEqual(golden_data, all_set.SerializeToString()) + golden_copy = copy.deepcopy(golden_message) + self.assertEqual(golden_data, golden_copy.SerializeToString()) + + def testPickleIncompleteProto(self): + golden_message = unittest_pb2.TestRequired(a=1) + pickled_message = pickle.dumps(golden_message) + + unpickled_message = pickle.loads(pickled_message) + self.assertEqual(unpickled_message, golden_message) + self.assertEqual(unpickled_message.a, 1) + # This is still an incomplete proto - so serializing should fail + self.assertRaises(message.EncodeError, unpickled_message.SerializeToString) + + + # TODO(haberman): this isn't really a proto2-specific test except that this + # message has a required field in it. Should probably be factored out so + # that we can test the other parts with proto3. 
+ def testParsingMerge(self): + """Check the merge behavior when a required or optional field appears + multiple times in the input.""" + messages = [ + unittest_pb2.TestAllTypes(), + unittest_pb2.TestAllTypes(), + unittest_pb2.TestAllTypes() ] + messages[0].optional_int32 = 1 + messages[1].optional_int64 = 2 + messages[2].optional_int32 = 3 + messages[2].optional_string = 'hello' + + merged_message = unittest_pb2.TestAllTypes() + merged_message.optional_int32 = 3 + merged_message.optional_int64 = 2 + merged_message.optional_string = 'hello' + + generator = unittest_pb2.TestParsingMerge.RepeatedFieldsGenerator() + generator.field1.extend(messages) + generator.field2.extend(messages) + generator.field3.extend(messages) + generator.ext1.extend(messages) + generator.ext2.extend(messages) + generator.group1.add().field1.MergeFrom(messages[0]) + generator.group1.add().field1.MergeFrom(messages[1]) + generator.group1.add().field1.MergeFrom(messages[2]) + generator.group2.add().field1.MergeFrom(messages[0]) + generator.group2.add().field1.MergeFrom(messages[1]) + generator.group2.add().field1.MergeFrom(messages[2]) + + data = generator.SerializeToString() + parsing_merge = unittest_pb2.TestParsingMerge() + parsing_merge.ParseFromString(data) + + # Required and optional fields should be merged. + self.assertEqual(parsing_merge.required_all_types, merged_message) + self.assertEqual(parsing_merge.optional_all_types, merged_message) + self.assertEqual(parsing_merge.optionalgroup.optional_group_all_types, + merged_message) + self.assertEqual(parsing_merge.Extensions[ + unittest_pb2.TestParsingMerge.optional_ext], + merged_message) + + # Repeated fields should not be merged. 
+ self.assertEqual(len(parsing_merge.repeated_all_types), 3) + self.assertEqual(len(parsing_merge.repeatedgroup), 3) + self.assertEqual(len(parsing_merge.Extensions[ + unittest_pb2.TestParsingMerge.repeated_ext]), 3) + + def testPythonicInit(self): + message = unittest_pb2.TestAllTypes( + optional_int32=100, + optional_fixed32=200, + optional_float=300.5, + optional_bytes=b'x', + optionalgroup={'a': 400}, + optional_nested_message={'bb': 500}, + optional_nested_enum='BAZ', + repeatedgroup=[{'a': 600}, + {'a': 700}], + repeated_nested_enum=['FOO', unittest_pb2.TestAllTypes.BAR], + default_int32=800, + oneof_string='y') + self.assertIsInstance(message, unittest_pb2.TestAllTypes) + self.assertEqual(100, message.optional_int32) + self.assertEqual(200, message.optional_fixed32) + self.assertEqual(300.5, message.optional_float) + self.assertEqual(b'x', message.optional_bytes) + self.assertEqual(400, message.optionalgroup.a) + self.assertIsInstance(message.optional_nested_message, unittest_pb2.TestAllTypes.NestedMessage) + self.assertEqual(500, message.optional_nested_message.bb) + self.assertEqual(unittest_pb2.TestAllTypes.BAZ, + message.optional_nested_enum) + self.assertEqual(2, len(message.repeatedgroup)) + self.assertEqual(600, message.repeatedgroup[0].a) + self.assertEqual(700, message.repeatedgroup[1].a) + self.assertEqual(2, len(message.repeated_nested_enum)) + self.assertEqual(unittest_pb2.TestAllTypes.FOO, + message.repeated_nested_enum[0]) + self.assertEqual(unittest_pb2.TestAllTypes.BAR, + message.repeated_nested_enum[1]) + self.assertEqual(800, message.default_int32) + self.assertEqual('y', message.oneof_string) + self.assertFalse(message.HasField('optional_int64')) + self.assertEqual(0, len(message.repeated_float)) + self.assertEqual(42, message.default_int64) + + message = unittest_pb2.TestAllTypes(optional_nested_enum=u'BAZ') + self.assertEqual(unittest_pb2.TestAllTypes.BAZ, + message.optional_nested_enum) + + with self.assertRaises(ValueError): + 
unittest_pb2.TestAllTypes( + optional_nested_message={'INVALID_NESTED_FIELD': 17}) + + with self.assertRaises(TypeError): + unittest_pb2.TestAllTypes( + optional_nested_message={'bb': 'INVALID_VALUE_TYPE'}) + + with self.assertRaises(ValueError): + unittest_pb2.TestAllTypes(optional_nested_enum='INVALID_LABEL') + + with self.assertRaises(ValueError): + unittest_pb2.TestAllTypes(repeated_nested_enum='FOO') + + +# Class to test proto3-only features/behavior (updated field presence & enums) +class Proto3Test(unittest.TestCase): + + # Utility method for comparing equality with a map. + def assertMapIterEquals(self, map_iter, dict_value): + # Avoid mutating caller's copy. + dict_value = dict(dict_value) + + for k, v in map_iter: + self.assertEqual(v, dict_value[k]) + del dict_value[k] + + self.assertEqual({}, dict_value) + + def testFieldPresence(self): + message = unittest_proto3_arena_pb2.TestAllTypes() + + # We can't test presence of non-repeated, non-submessage fields. + with self.assertRaises(ValueError): + message.HasField('optional_int32') + with self.assertRaises(ValueError): + message.HasField('optional_float') + with self.assertRaises(ValueError): + message.HasField('optional_string') + with self.assertRaises(ValueError): + message.HasField('optional_bool') + + # But we can still test presence of submessage fields. + self.assertFalse(message.HasField('optional_nested_message')) + + # As with proto2, we can't test presence of fields that don't exist, or + # repeated fields. + with self.assertRaises(ValueError): + message.HasField('field_doesnt_exist') + + with self.assertRaises(ValueError): + message.HasField('repeated_int32') + with self.assertRaises(ValueError): + message.HasField('repeated_nested_message') + + # Fields should default to their type-specific default. 
+ self.assertEqual(0, message.optional_int32) + self.assertEqual(0, message.optional_float) + self.assertEqual('', message.optional_string) + self.assertEqual(False, message.optional_bool) + self.assertEqual(0, message.optional_nested_message.bb) + + # Setting a submessage should still return proper presence information. + message.optional_nested_message.bb = 0 + self.assertTrue(message.HasField('optional_nested_message')) + + # Set the fields to non-default values. + message.optional_int32 = 5 + message.optional_float = 1.1 + message.optional_string = 'abc' + message.optional_bool = True + message.optional_nested_message.bb = 15 + + # Clearing the fields unsets them and resets their value to default. + message.ClearField('optional_int32') + message.ClearField('optional_float') + message.ClearField('optional_string') + message.ClearField('optional_bool') + message.ClearField('optional_nested_message') + + self.assertEqual(0, message.optional_int32) + self.assertEqual(0, message.optional_float) + self.assertEqual('', message.optional_string) + self.assertEqual(False, message.optional_bool) + self.assertEqual(0, message.optional_nested_message.bb) + + def testAssignUnknownEnum(self): + """Assigning an unknown enum value is allowed and preserves the value.""" + m = unittest_proto3_arena_pb2.TestAllTypes() + + m.optional_nested_enum = 1234567 + self.assertEqual(1234567, m.optional_nested_enum) + m.repeated_nested_enum.append(22334455) + self.assertEqual(22334455, m.repeated_nested_enum[0]) + # Assignment is a different code path than append for the C++ impl. + m.repeated_nested_enum[0] = 7654321 + self.assertEqual(7654321, m.repeated_nested_enum[0]) + serialized = m.SerializeToString() + + m2 = unittest_proto3_arena_pb2.TestAllTypes() + m2.ParseFromString(serialized) + self.assertEqual(1234567, m2.optional_nested_enum) + self.assertEqual(7654321, m2.repeated_nested_enum[0]) + + # Map isn't really a proto3-only feature. 
But there is no proto2 equivalent + # of google/protobuf/map_unittest.proto right now, so it's not easy to + # test both with the same test like we do for the other proto2/proto3 tests. + # (google/protobuf/map_protobuf_unittest.proto is very different in the set + # of messages and fields it contains). + def testScalarMapDefaults(self): + msg = map_unittest_pb2.TestMap() + + # Scalars start out unset. + self.assertFalse(-123 in msg.map_int32_int32) + self.assertFalse(-2**33 in msg.map_int64_int64) + self.assertFalse(123 in msg.map_uint32_uint32) + self.assertFalse(2**33 in msg.map_uint64_uint64) + self.assertFalse('abc' in msg.map_string_string) + self.assertFalse(888 in msg.map_int32_enum) + + # Accessing an unset key returns the default. + self.assertEqual(0, msg.map_int32_int32[-123]) + self.assertEqual(0, msg.map_int64_int64[-2**33]) + self.assertEqual(0, msg.map_uint32_uint32[123]) + self.assertEqual(0, msg.map_uint64_uint64[2**33]) + self.assertEqual('', msg.map_string_string['abc']) + self.assertEqual(0, msg.map_int32_enum[888]) + + # It also sets the value in the map + self.assertTrue(-123 in msg.map_int32_int32) + self.assertTrue(-2**33 in msg.map_int64_int64) + self.assertTrue(123 in msg.map_uint32_uint32) + self.assertTrue(2**33 in msg.map_uint64_uint64) + self.assertTrue('abc' in msg.map_string_string) + self.assertTrue(888 in msg.map_int32_enum) + + self.assertIsInstance(msg.map_string_string['abc'], six.text_type) + + # Accessing an unset key still throws TypeError if the type of the key + # is incorrect. + with self.assertRaises(TypeError): + msg.map_string_string[123] + + with self.assertRaises(TypeError): + 123 in msg.map_string_string + + def testMapGet(self): + # Need to test that get() properly returns the default, even though the dict + # has defaultdict-like semantics. 
+ msg = map_unittest_pb2.TestMap() + + self.assertIsNone(msg.map_int32_int32.get(5)) + self.assertEqual(10, msg.map_int32_int32.get(5, 10)) + self.assertIsNone(msg.map_int32_int32.get(5)) + + msg.map_int32_int32[5] = 15 + self.assertEqual(15, msg.map_int32_int32.get(5)) + + self.assertIsNone(msg.map_int32_foreign_message.get(5)) + self.assertEqual(10, msg.map_int32_foreign_message.get(5, 10)) + + submsg = msg.map_int32_foreign_message[5] + self.assertIs(submsg, msg.map_int32_foreign_message.get(5)) + + def testScalarMap(self): + msg = map_unittest_pb2.TestMap() + + self.assertEqual(0, len(msg.map_int32_int32)) + self.assertFalse(5 in msg.map_int32_int32) + + msg.map_int32_int32[-123] = -456 + msg.map_int64_int64[-2**33] = -2**34 + msg.map_uint32_uint32[123] = 456 + msg.map_uint64_uint64[2**33] = 2**34 + msg.map_string_string['abc'] = '123' + msg.map_int32_enum[888] = 2 + + self.assertEqual([], msg.FindInitializationErrors()) + + self.assertEqual(1, len(msg.map_string_string)) + + # Bad key. + with self.assertRaises(TypeError): + msg.map_string_string[123] = '123' + + # Verify that trying to assign a bad key doesn't actually add a member to + # the map. + self.assertEqual(1, len(msg.map_string_string)) + + # Bad value. + with self.assertRaises(TypeError): + msg.map_string_string['123'] = 123 + + serialized = msg.SerializeToString() + msg2 = map_unittest_pb2.TestMap() + msg2.ParseFromString(serialized) + + # Bad key. + with self.assertRaises(TypeError): + msg2.map_string_string[123] = '123' + + # Bad value. 
+ with self.assertRaises(TypeError): + msg2.map_string_string['123'] = 123 + + self.assertEqual(-456, msg2.map_int32_int32[-123]) + self.assertEqual(-2**34, msg2.map_int64_int64[-2**33]) + self.assertEqual(456, msg2.map_uint32_uint32[123]) + self.assertEqual(2**34, msg2.map_uint64_uint64[2**33]) + self.assertEqual('123', msg2.map_string_string['abc']) + self.assertEqual(2, msg2.map_int32_enum[888]) + + def testStringUnicodeConversionInMap(self): + msg = map_unittest_pb2.TestMap() + + unicode_obj = u'\u1234' + bytes_obj = unicode_obj.encode('utf8') + + msg.map_string_string[bytes_obj] = bytes_obj + + (key, value) = list(msg.map_string_string.items())[0] + + self.assertEqual(key, unicode_obj) + self.assertEqual(value, unicode_obj) + + self.assertIsInstance(key, six.text_type) + self.assertIsInstance(value, six.text_type) + + def testMessageMap(self): + msg = map_unittest_pb2.TestMap() + + self.assertEqual(0, len(msg.map_int32_foreign_message)) + self.assertFalse(5 in msg.map_int32_foreign_message) + + msg.map_int32_foreign_message[123] + # get_or_create() is an alias for getitem. + msg.map_int32_foreign_message.get_or_create(-456) + + self.assertEqual(2, len(msg.map_int32_foreign_message)) + self.assertIn(123, msg.map_int32_foreign_message) + self.assertIn(-456, msg.map_int32_foreign_message) + self.assertEqual(2, len(msg.map_int32_foreign_message)) + + # Bad key. + with self.assertRaises(TypeError): + msg.map_int32_foreign_message['123'] + + # Can't assign directly to submessage. + with self.assertRaises(ValueError): + msg.map_int32_foreign_message[999] = msg.map_int32_foreign_message[123] + + # Verify that trying to assign a bad key doesn't actually add a member to + # the map. 
+ self.assertEqual(2, len(msg.map_int32_foreign_message)) + + serialized = msg.SerializeToString() + msg2 = map_unittest_pb2.TestMap() + msg2.ParseFromString(serialized) + + self.assertEqual(2, len(msg2.map_int32_foreign_message)) + self.assertIn(123, msg2.map_int32_foreign_message) + self.assertIn(-456, msg2.map_int32_foreign_message) + self.assertEqual(2, len(msg2.map_int32_foreign_message)) + + def testMergeFrom(self): + msg = map_unittest_pb2.TestMap() + msg.map_int32_int32[12] = 34 + msg.map_int32_int32[56] = 78 + msg.map_int64_int64[22] = 33 + msg.map_int32_foreign_message[111].c = 5 + msg.map_int32_foreign_message[222].c = 10 + + msg2 = map_unittest_pb2.TestMap() + msg2.map_int32_int32[12] = 55 + msg2.map_int64_int64[88] = 99 + msg2.map_int32_foreign_message[222].c = 15 + + msg2.MergeFrom(msg) + + self.assertEqual(34, msg2.map_int32_int32[12]) + self.assertEqual(78, msg2.map_int32_int32[56]) + self.assertEqual(33, msg2.map_int64_int64[22]) + self.assertEqual(99, msg2.map_int64_int64[88]) + self.assertEqual(5, msg2.map_int32_foreign_message[111].c) + self.assertEqual(10, msg2.map_int32_foreign_message[222].c) + + # Verify that there is only one entry per key, even though the MergeFrom + # may have internally created multiple entries for a single key in the + # list representation. + as_dict = {} + for key in msg2.map_int32_foreign_message: + self.assertFalse(key in as_dict) + as_dict[key] = msg2.map_int32_foreign_message[key].c + + self.assertEqual({111: 5, 222: 10}, as_dict) + + # Special case: test that delete of item really removes the item, even if + # there might have physically been duplicate keys due to the previous merge. + # This is only a special case for the C++ implementation which stores the + # map as an array. 
+ del msg2.map_int32_int32[12] + self.assertFalse(12 in msg2.map_int32_int32) + + del msg2.map_int32_foreign_message[222] + self.assertFalse(222 in msg2.map_int32_foreign_message) + + def testIntegerMapWithLongs(self): + msg = map_unittest_pb2.TestMap() + msg.map_int32_int32[long(-123)] = long(-456) + msg.map_int64_int64[long(-2**33)] = long(-2**34) + msg.map_uint32_uint32[long(123)] = long(456) + msg.map_uint64_uint64[long(2**33)] = long(2**34) + + serialized = msg.SerializeToString() + msg2 = map_unittest_pb2.TestMap() + msg2.ParseFromString(serialized) + + self.assertEqual(-456, msg2.map_int32_int32[-123]) + self.assertEqual(-2**34, msg2.map_int64_int64[-2**33]) + self.assertEqual(456, msg2.map_uint32_uint32[123]) + self.assertEqual(2**34, msg2.map_uint64_uint64[2**33]) + + def testMapAssignmentCausesPresence(self): + msg = map_unittest_pb2.TestMapSubmessage() + msg.test_map.map_int32_int32[123] = 456 + + serialized = msg.SerializeToString() + msg2 = map_unittest_pb2.TestMapSubmessage() + msg2.ParseFromString(serialized) + + self.assertEqual(msg, msg2) + + # Now test that various mutations of the map properly invalidate the + # cached size of the submessage. + msg.test_map.map_int32_int32[888] = 999 + serialized = msg.SerializeToString() + msg2.ParseFromString(serialized) + self.assertEqual(msg, msg2) + + msg.test_map.map_int32_int32.clear() + serialized = msg.SerializeToString() + msg2.ParseFromString(serialized) + self.assertEqual(msg, msg2) + + def testMapAssignmentCausesPresenceForSubmessages(self): + msg = map_unittest_pb2.TestMapSubmessage() + msg.test_map.map_int32_foreign_message[123].c = 5 + + serialized = msg.SerializeToString() + msg2 = map_unittest_pb2.TestMapSubmessage() + msg2.ParseFromString(serialized) + + self.assertEqual(msg, msg2) + + # Now test that various mutations of the map properly invalidate the + # cached size of the submessage. 
+ msg.test_map.map_int32_foreign_message[888].c = 7 + serialized = msg.SerializeToString() + msg2.ParseFromString(serialized) + self.assertEqual(msg, msg2) + + msg.test_map.map_int32_foreign_message[888].MergeFrom( + msg.test_map.map_int32_foreign_message[123]) + serialized = msg.SerializeToString() + msg2.ParseFromString(serialized) + self.assertEqual(msg, msg2) + + msg.test_map.map_int32_foreign_message.clear() + serialized = msg.SerializeToString() + msg2.ParseFromString(serialized) + self.assertEqual(msg, msg2) + + def testModifyMapWhileIterating(self): + msg = map_unittest_pb2.TestMap() + + string_string_iter = iter(msg.map_string_string) + int32_foreign_iter = iter(msg.map_int32_foreign_message) + + msg.map_string_string['abc'] = '123' + msg.map_int32_foreign_message[5].c = 5 + + with self.assertRaises(RuntimeError): + for key in string_string_iter: + pass + + with self.assertRaises(RuntimeError): + for key in int32_foreign_iter: + pass + + def testSubmessageMap(self): + msg = map_unittest_pb2.TestMap() + + submsg = msg.map_int32_foreign_message[111] + self.assertIs(submsg, msg.map_int32_foreign_message[111]) + self.assertIsInstance(submsg, unittest_pb2.ForeignMessage) + + submsg.c = 5 + + serialized = msg.SerializeToString() + msg2 = map_unittest_pb2.TestMap() + msg2.ParseFromString(serialized) + + self.assertEqual(5, msg2.map_int32_foreign_message[111].c) + + # Doesn't allow direct submessage assignment. + with self.assertRaises(ValueError): + msg.map_int32_foreign_message[88] = unittest_pb2.ForeignMessage() + + def testMapIteration(self): + msg = map_unittest_pb2.TestMap() + + for k, v in msg.map_int32_int32.items(): + # Should not be reached. 
+ self.assertTrue(False) + + msg.map_int32_int32[2] = 4 + msg.map_int32_int32[3] = 6 + msg.map_int32_int32[4] = 8 + self.assertEqual(3, len(msg.map_int32_int32)) + + matching_dict = {2: 4, 3: 6, 4: 8} + self.assertMapIterEquals(msg.map_int32_int32.items(), matching_dict) + + def testMapIterationClearMessage(self): + # Iterator needs to work even if message and map are deleted. + msg = map_unittest_pb2.TestMap() + + msg.map_int32_int32[2] = 4 + msg.map_int32_int32[3] = 6 + msg.map_int32_int32[4] = 8 + + it = msg.map_int32_int32.items() + del msg + + matching_dict = {2: 4, 3: 6, 4: 8} + self.assertMapIterEquals(it, matching_dict) + + def testMapConstruction(self): + msg = map_unittest_pb2.TestMap(map_int32_int32={1: 2, 3: 4}) + self.assertEqual(2, msg.map_int32_int32[1]) + self.assertEqual(4, msg.map_int32_int32[3]) + + msg = map_unittest_pb2.TestMap( + map_int32_foreign_message={3: unittest_pb2.ForeignMessage(c=5)}) + self.assertEqual(5, msg.map_int32_foreign_message[3].c) + + def testMapValidAfterFieldCleared(self): + # Map needs to work even if field is cleared. + # For the C++ implementation this tests the correctness of + # ScalarMapContainer::Release() + msg = map_unittest_pb2.TestMap() + int32_map = msg.map_int32_int32 + + int32_map[2] = 4 + int32_map[3] = 6 + int32_map[4] = 8 + + msg.ClearField('map_int32_int32') + self.assertEqual(b'', msg.SerializeToString()) + matching_dict = {2: 4, 3: 6, 4: 8} + self.assertMapIterEquals(int32_map.items(), matching_dict) + + def testMessageMapValidAfterFieldCleared(self): + # Map needs to work even if field is cleared. 
+ # For the C++ implementation this tests the correctness of + # ScalarMapContainer::Release() + msg = map_unittest_pb2.TestMap() + int32_foreign_message = msg.map_int32_foreign_message + + int32_foreign_message[2].c = 5 + + msg.ClearField('map_int32_foreign_message') + self.assertEqual(b'', msg.SerializeToString()) + self.assertTrue(2 in int32_foreign_message.keys()) + + def testMapIterInvalidatedByClearField(self): + # Map iterator is invalidated when field is cleared. + # But this case does need to not crash the interpreter. + # For the C++ implementation this tests the correctness of + # ScalarMapContainer::Release() + msg = map_unittest_pb2.TestMap() + + it = iter(msg.map_int32_int32) + + msg.ClearField('map_int32_int32') + with self.assertRaises(RuntimeError): + for _ in it: + pass + + it = iter(msg.map_int32_foreign_message) + msg.ClearField('map_int32_foreign_message') + with self.assertRaises(RuntimeError): + for _ in it: + pass + + def testMapDelete(self): + msg = map_unittest_pb2.TestMap() + + self.assertEqual(0, len(msg.map_int32_int32)) + + msg.map_int32_int32[4] = 6 + self.assertEqual(1, len(msg.map_int32_int32)) + + with self.assertRaises(KeyError): + del msg.map_int32_int32[88] + + del msg.map_int32_int32[4] + self.assertEqual(0, len(msg.map_int32_int32)) + + def testMapsAreMapping(self): + msg = map_unittest_pb2.TestMap() + self.assertIsInstance(msg.map_int32_int32, collections.Mapping) + self.assertIsInstance(msg.map_int32_int32, collections.MutableMapping) + self.assertIsInstance(msg.map_int32_foreign_message, collections.Mapping) + self.assertIsInstance(msg.map_int32_foreign_message, + collections.MutableMapping) + + def testMapFindInitializationErrorsSmokeTest(self): + msg = map_unittest_pb2.TestMap() + msg.map_string_string['abc'] = '123' + msg.map_int32_int32[35] = 64 + msg.map_string_foreign_message['foo'].c = 5 + self.assertEqual(0, len(msg.FindInitializationErrors())) + + def testAnyMessage(self): + # Creates and sets message. 
+ msg = any_test_pb2.TestAny() + msg_descriptor = msg.DESCRIPTOR + all_types = unittest_pb2.TestAllTypes() + all_descriptor = all_types.DESCRIPTOR + all_types.repeated_string.append(u'\u00fc\ua71f') + # Packs to Any. + msg.value.Pack(all_types) + self.assertEqual(msg.value.type_url, + 'type.googleapis.com/%s' % all_descriptor.full_name) + self.assertEqual(msg.value.value, + all_types.SerializeToString()) + # Tests Is() method. + self.assertTrue(msg.value.Is(all_descriptor)) + self.assertFalse(msg.value.Is(msg_descriptor)) + # Unpacks Any. + unpacked_message = unittest_pb2.TestAllTypes() + self.assertTrue(msg.value.Unpack(unpacked_message)) + self.assertEqual(all_types, unpacked_message) + # Unpacks to different type. + self.assertFalse(msg.value.Unpack(msg)) + # Only Any messages have Pack method. + try: + msg.Pack(all_types) + except AttributeError: + pass + else: + raise AttributeError('%s should not have Pack method.' % + msg_descriptor.full_name) + + + +class ValidTypeNamesTest(unittest.TestCase): + + def assertImportFromName(self, msg, base_name): + # Parse to extra 'some.name' as a string. + tp_name = str(type(msg)).split("'")[1] + valid_names = ('Repeated%sContainer' % base_name, + 'Repeated%sFieldContainer' % base_name) + self.assertTrue(any(tp_name.endswith(v) for v in valid_names), + '%r does end with any of %r' % (tp_name, valid_names)) + + parts = tp_name.split('.') + class_name = parts[-1] + module_name = '.'.join(parts[:-1]) + __import__(module_name, fromlist=[class_name]) + + def testTypeNamesCanBeImported(self): + # If import doesn't work, pickling won't work either. 
+ pb = unittest_pb2.TestAllTypes() + self.assertImportFromName(pb.repeated_int32, 'Scalar') + self.assertImportFromName(pb.repeated_nested_message, 'Composite') + +class PackedFieldTest(unittest.TestCase): + + def setMessage(self, message): + message.repeated_int32.append(1) + message.repeated_int64.append(1) + message.repeated_uint32.append(1) + message.repeated_uint64.append(1) + message.repeated_sint32.append(1) + message.repeated_sint64.append(1) + message.repeated_fixed32.append(1) + message.repeated_fixed64.append(1) + message.repeated_sfixed32.append(1) + message.repeated_sfixed64.append(1) + message.repeated_float.append(1.0) + message.repeated_double.append(1.0) + message.repeated_bool.append(True) + message.repeated_nested_enum.append(1) + + def testPackedFields(self): + message = packed_field_test_pb2.TestPackedTypes() + self.setMessage(message) + golden_data = (b'\x0A\x01\x01' + b'\x12\x01\x01' + b'\x1A\x01\x01' + b'\x22\x01\x01' + b'\x2A\x01\x02' + b'\x32\x01\x02' + b'\x3A\x04\x01\x00\x00\x00' + b'\x42\x08\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x4A\x04\x01\x00\x00\x00' + b'\x52\x08\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x5A\x04\x00\x00\x80\x3f' + b'\x62\x08\x00\x00\x00\x00\x00\x00\xf0\x3f' + b'\x6A\x01\x01' + b'\x72\x01\x01') + self.assertEqual(golden_data, message.SerializeToString()) + + def testUnpackedFields(self): + message = packed_field_test_pb2.TestUnpackedTypes() + self.setMessage(message) + golden_data = (b'\x08\x01' + b'\x10\x01' + b'\x18\x01' + b'\x20\x01' + b'\x28\x02' + b'\x30\x02' + b'\x3D\x01\x00\x00\x00' + b'\x41\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x4D\x01\x00\x00\x00' + b'\x51\x01\x00\x00\x00\x00\x00\x00\x00' + b'\x5D\x00\x00\x80\x3f' + b'\x61\x00\x00\x00\x00\x00\x00\xf0\x3f' + b'\x68\x01' + b'\x70\x01') + self.assertEqual(golden_data, message.SerializeToString()) + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/missing_enum_values_pb2.py 
b/deps/google/protobuf/internal/missing_enum_values_pb2.py new file mode 100644 index 00000000..4767f036 --- /dev/null +++ b/deps/google/protobuf/internal/missing_enum_values_pb2.py @@ -0,0 +1,229 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/missing_enum_values.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/missing_enum_values.proto', + package='google.protobuf.python.internal', + syntax='proto2', + serialized_pb=_b('\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 
\x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_TESTENUMVALUES_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.TestEnumValues.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ZERO', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ONE', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=378, + serialized_end=409, +) +_sym_db.RegisterEnumDescriptor(_TESTENUMVALUES_NESTEDENUM) + +_TESTMISSINGENUMVALUES_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.TestMissingEnumValues.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TWO', index=0, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=730, + serialized_end=751, +) +_sym_db.RegisterEnumDescriptor(_TESTMISSINGENUMVALUES_NESTEDENUM) + + +_TESTENUMVALUES = _descriptor.Descriptor( + name='TestEnumValues', + full_name='google.protobuf.python.internal.TestEnumValues', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.optional_nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.repeated_nested_enum', index=1, + 
number=2, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='packed_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.packed_nested_enum', index=2, + number=3, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _TESTENUMVALUES_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=88, + serialized_end=409, +) + + +_TESTMISSINGENUMVALUES = _descriptor.Descriptor( + name='TestMissingEnumValues', + full_name='google.protobuf.python.internal.TestMissingEnumValues', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.optional_nested_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.repeated_nested_enum', index=1, + number=2, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='packed_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.packed_nested_enum', index=2, + number=3, type=14, 
cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _TESTMISSINGENUMVALUES_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=412, + serialized_end=751, +) + + +_JUSTSTRING = _descriptor.Descriptor( + name='JustString', + full_name='google.protobuf.python.internal.JustString', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='dummy', full_name='google.protobuf.python.internal.JustString.dummy', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=753, + serialized_end=780, +) + +_TESTENUMVALUES.fields_by_name['optional_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM +_TESTENUMVALUES.fields_by_name['repeated_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM +_TESTENUMVALUES.fields_by_name['packed_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM +_TESTENUMVALUES_NESTEDENUM.containing_type = _TESTENUMVALUES +_TESTMISSINGENUMVALUES.fields_by_name['optional_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM +_TESTMISSINGENUMVALUES.fields_by_name['repeated_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM +_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM +_TESTMISSINGENUMVALUES_NESTEDENUM.containing_type = _TESTMISSINGENUMVALUES 
+DESCRIPTOR.message_types_by_name['TestEnumValues'] = _TESTENUMVALUES +DESCRIPTOR.message_types_by_name['TestMissingEnumValues'] = _TESTMISSINGENUMVALUES +DESCRIPTOR.message_types_by_name['JustString'] = _JUSTSTRING + +TestEnumValues = _reflection.GeneratedProtocolMessageType('TestEnumValues', (_message.Message,), dict( + DESCRIPTOR = _TESTENUMVALUES, + __module__ = 'google.protobuf.internal.missing_enum_values_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestEnumValues) + )) +_sym_db.RegisterMessage(TestEnumValues) + +TestMissingEnumValues = _reflection.GeneratedProtocolMessageType('TestMissingEnumValues', (_message.Message,), dict( + DESCRIPTOR = _TESTMISSINGENUMVALUES, + __module__ = 'google.protobuf.internal.missing_enum_values_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestMissingEnumValues) + )) +_sym_db.RegisterMessage(TestMissingEnumValues) + +JustString = _reflection.GeneratedProtocolMessageType('JustString', (_message.Message,), dict( + DESCRIPTOR = _JUSTSTRING, + __module__ = 'google.protobuf.internal.missing_enum_values_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.JustString) + )) +_sym_db.RegisterMessage(JustString) + + +_TESTENUMVALUES.fields_by_name['packed_nested_enum'].has_options = True +_TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum'].has_options = True +_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/more_extensions_dynamic_pb2.py b/deps/google/protobuf/internal/more_extensions_dynamic_pb2.py new file mode 100644 index 00000000..e1478004 --- /dev/null +++ b/deps/google/protobuf/internal/more_extensions_dynamic_pb2.py @@ 
-0,0 +1,92 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/more_extensions_dynamic.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/more_extensions_dynamic.proto', + package='google.protobuf.internal', + syntax='proto2', + serialized_pb=_b('\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType') + , + dependencies=[google_dot_protobuf_dot_internal_dot_more__extensions__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +DYNAMIC_INT32_EXTENSION_FIELD_NUMBER = 100 +dynamic_int32_extension = _descriptor.FieldDescriptor( + name='dynamic_int32_extension', full_name='google.protobuf.internal.dynamic_int32_extension', index=0, + number=100, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DYNAMIC_MESSAGE_EXTENSION_FIELD_NUMBER = 101 +dynamic_message_extension = _descriptor.FieldDescriptor( + 
name='dynamic_message_extension', full_name='google.protobuf.internal.dynamic_message_extension', index=1, + number=101, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + + +_DYNAMICMESSAGETYPE = _descriptor.Descriptor( + name='DynamicMessageType', + full_name='google.protobuf.internal.DynamicMessageType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='google.protobuf.internal.DynamicMessageType.a', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=132, + serialized_end=163, +) + +DESCRIPTOR.message_types_by_name['DynamicMessageType'] = _DYNAMICMESSAGETYPE +DESCRIPTOR.extensions_by_name['dynamic_int32_extension'] = dynamic_int32_extension +DESCRIPTOR.extensions_by_name['dynamic_message_extension'] = dynamic_message_extension + +DynamicMessageType = _reflection.GeneratedProtocolMessageType('DynamicMessageType', (_message.Message,), dict( + DESCRIPTOR = _DYNAMICMESSAGETYPE, + __module__ = 'google.protobuf.internal.more_extensions_dynamic_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.DynamicMessageType) + )) +_sym_db.RegisterMessage(DynamicMessageType) + +google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) +dynamic_message_extension.message_type = _DYNAMICMESSAGETYPE +google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) + +# 
@@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/more_extensions_pb2.py b/deps/google/protobuf/internal/more_extensions_pb2.py new file mode 100644 index 00000000..c2a03aa0 --- /dev/null +++ b/deps/google/protobuf/internal/more_extensions_pb2.py @@ -0,0 +1,183 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/more_extensions.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/more_extensions.proto', + package='google.protobuf.internal', + syntax='proto2', + serialized_pb=_b('\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"P\n\x0fTopLevelMessage\x12=\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessage\"\x1b\n\x0f\x45xtendedMessage*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +OPTIONAL_INT_EXTENSION_FIELD_NUMBER = 1 +optional_int_extension = _descriptor.FieldDescriptor( + 
name='optional_int_extension', full_name='google.protobuf.internal.optional_int_extension', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_MESSAGE_EXTENSION_FIELD_NUMBER = 2 +optional_message_extension = _descriptor.FieldDescriptor( + name='optional_message_extension', full_name='google.protobuf.internal.optional_message_extension', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_INT_EXTENSION_FIELD_NUMBER = 3 +repeated_int_extension = _descriptor.FieldDescriptor( + name='repeated_int_extension', full_name='google.protobuf.internal.repeated_int_extension', index=2, + number=3, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_MESSAGE_EXTENSION_FIELD_NUMBER = 4 +repeated_message_extension = _descriptor.FieldDescriptor( + name='repeated_message_extension', full_name='google.protobuf.internal.repeated_message_extension', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + + +_TOPLEVELMESSAGE = _descriptor.Descriptor( + name='TopLevelMessage', + full_name='google.protobuf.internal.TopLevelMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='submessage', full_name='google.protobuf.internal.TopLevelMessage.submessage', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=76, + serialized_end=156, +) + + +_EXTENDEDMESSAGE = _descriptor.Descriptor( + name='ExtendedMessage', + full_name='google.protobuf.internal.ExtendedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1, 536870912), ], + oneofs=[ + ], + serialized_start=158, + serialized_end=185, +) + + +_FOREIGNMESSAGE = _descriptor.Descriptor( + name='ForeignMessage', + full_name='google.protobuf.internal.ForeignMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='foreign_message_int', full_name='google.protobuf.internal.ForeignMessage.foreign_message_int', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=187, + serialized_end=232, +) + +_TOPLEVELMESSAGE.fields_by_name['submessage'].message_type = _EXTENDEDMESSAGE +DESCRIPTOR.message_types_by_name['TopLevelMessage'] = _TOPLEVELMESSAGE +DESCRIPTOR.message_types_by_name['ExtendedMessage'] = _EXTENDEDMESSAGE +DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE +DESCRIPTOR.extensions_by_name['optional_int_extension'] = optional_int_extension +DESCRIPTOR.extensions_by_name['optional_message_extension'] = optional_message_extension 
+DESCRIPTOR.extensions_by_name['repeated_int_extension'] = repeated_int_extension +DESCRIPTOR.extensions_by_name['repeated_message_extension'] = repeated_message_extension + +TopLevelMessage = _reflection.GeneratedProtocolMessageType('TopLevelMessage', (_message.Message,), dict( + DESCRIPTOR = _TOPLEVELMESSAGE, + __module__ = 'google.protobuf.internal.more_extensions_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.TopLevelMessage) + )) +_sym_db.RegisterMessage(TopLevelMessage) + +ExtendedMessage = _reflection.GeneratedProtocolMessageType('ExtendedMessage', (_message.Message,), dict( + DESCRIPTOR = _EXTENDEDMESSAGE, + __module__ = 'google.protobuf.internal.more_extensions_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.ExtendedMessage) + )) +_sym_db.RegisterMessage(ExtendedMessage) + +ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict( + DESCRIPTOR = _FOREIGNMESSAGE, + __module__ = 'google.protobuf.internal.more_extensions_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.ForeignMessage) + )) +_sym_db.RegisterMessage(ForeignMessage) + +ExtendedMessage.RegisterExtension(optional_int_extension) +optional_message_extension.message_type = _FOREIGNMESSAGE +ExtendedMessage.RegisterExtension(optional_message_extension) +ExtendedMessage.RegisterExtension(repeated_int_extension) +repeated_message_extension.message_type = _FOREIGNMESSAGE +ExtendedMessage.RegisterExtension(repeated_message_extension) + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/more_messages_pb2.py b/deps/google/protobuf/internal/more_messages_pb2.py new file mode 100644 index 00000000..4ad9c9ef --- /dev/null +++ b/deps/google/protobuf/internal/more_messages_pb2.py @@ -0,0 +1,103 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/internal/more_messages.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/more_messages.proto', + package='google.protobuf.internal', + syntax='proto2', + serialized_pb=_b('\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +OPTIONAL_UINT64_FIELD_NUMBER = 4 +optional_uint64 = _descriptor.FieldDescriptor( + name='optional_uint64', full_name='google.protobuf.internal.optional_uint64', index=0, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_INT64_FIELD_NUMBER = 2 +optional_int64 = _descriptor.FieldDescriptor( + name='optional_int64', full_name='google.protobuf.internal.optional_int64', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + + +_OUTOFORDERFIELDS = _descriptor.Descriptor( + 
name='OutOfOrderFields', + full_name='google.protobuf.internal.OutOfOrderFields', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_sint32', full_name='google.protobuf.internal.OutOfOrderFields.optional_sint32', index=0, + number=5, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_uint32', full_name='google.protobuf.internal.OutOfOrderFields.optional_uint32', index=1, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_int32', full_name='google.protobuf.internal.OutOfOrderFields.optional_int32', index=2, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(4, 5), (2, 3), ], + oneofs=[ + ], + serialized_start=74, + serialized_end=178, +) + +DESCRIPTOR.message_types_by_name['OutOfOrderFields'] = _OUTOFORDERFIELDS +DESCRIPTOR.extensions_by_name['optional_uint64'] = optional_uint64 +DESCRIPTOR.extensions_by_name['optional_int64'] = optional_int64 + +OutOfOrderFields = _reflection.GeneratedProtocolMessageType('OutOfOrderFields', (_message.Message,), dict( + DESCRIPTOR = _OUTOFORDERFIELDS, + __module__ = 'google.protobuf.internal.more_messages_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.internal.OutOfOrderFields) + )) +_sym_db.RegisterMessage(OutOfOrderFields) + 
+OutOfOrderFields.RegisterExtension(optional_uint64) +OutOfOrderFields.RegisterExtension(optional_int64) + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/packed_field_test_pb2.py b/deps/google/protobuf/internal/packed_field_test_pb2.py new file mode 100644 index 00000000..77b63849 --- /dev/null +++ b/deps/google/protobuf/internal/packed_field_test_pb2.py @@ -0,0 +1,348 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/internal/packed_field_test.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/internal/packed_field_test.proto', + package='google.protobuf.python.internal', + syntax='proto3', + serialized_pb=_b('\n0google/protobuf/internal/packed_field_test.proto\x12\x1fgoogle.protobuf.python.internal\"\xdb\x03\n\x0fTestPackedTypes\x12\x16\n\x0erepeated_int32\x18\x01 \x03(\x05\x12\x16\n\x0erepeated_int64\x18\x02 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18\x03 \x03(\r\x12\x17\n\x0frepeated_uint64\x18\x04 \x03(\x04\x12\x17\n\x0frepeated_sint32\x18\x05 \x03(\x11\x12\x17\n\x0frepeated_sint64\x18\x06 \x03(\x12\x12\x18\n\x10repeated_fixed32\x18\x07 \x03(\x07\x12\x18\n\x10repeated_fixed64\x18\x08 \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\t \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18\n \x03(\x10\x12\x16\n\x0erepeated_float\x18\x0b \x03(\x02\x12\x17\n\x0frepeated_double\x18\x0c \x03(\x01\x12\x15\n\rrepeated_bool\x18\r \x03(\x08\x12Y\n\x14repeated_nested_enum\x18\x0e 
\x03(\x0e\x32;.google.protobuf.python.internal.TestPackedTypes.NestedEnum\"\'\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x12\x07\n\x03\x42\x41Z\x10\x02\"\xec\x03\n\x11TestUnpackedTypes\x12\x1a\n\x0erepeated_int32\x18\x01 \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0erepeated_int64\x18\x02 \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0frepeated_uint32\x18\x03 \x03(\rB\x02\x10\x00\x12\x1b\n\x0frepeated_uint64\x18\x04 \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint32\x18\x05 \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint64\x18\x06 \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed32\x18\x07 \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed64\x18\x08 \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed32\x18\t \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed64\x18\n \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0erepeated_float\x18\x0b \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0frepeated_double\x18\x0c \x03(\x01\x42\x02\x10\x00\x12\x19\n\rrepeated_bool\x18\r \x03(\x08\x42\x02\x10\x00\x12]\n\x14repeated_nested_enum\x18\x0e \x03(\x0e\x32;.google.protobuf.python.internal.TestPackedTypes.NestedEnumB\x02\x10\x00\x62\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_TESTPACKEDTYPES_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='google.protobuf.python.internal.TestPackedTypes.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOO', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAR', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAZ', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=522, + serialized_end=561, +) +_sym_db.RegisterEnumDescriptor(_TESTPACKEDTYPES_NESTEDENUM) + + +_TESTPACKEDTYPES = _descriptor.Descriptor( + name='TestPackedTypes', + 
full_name='google.protobuf.python.internal.TestPackedTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='repeated_int32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_int32', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_int64', index=1, + number=2, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_uint32', index=2, + number=3, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_uint64', index=3, + number=4, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sint32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sint32', index=4, + number=5, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sint64', 
full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sint64', index=5, + number=6, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_fixed32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_fixed32', index=6, + number=7, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_fixed64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_fixed64', index=7, + number=8, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sfixed32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sfixed32', index=8, + number=9, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sfixed64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sfixed64', index=9, + number=10, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_float', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_float', index=10, + number=11, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_double', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_double', index=11, + number=12, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bool', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_bool', index=12, + number=13, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_nested_enum', index=13, + number=14, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _TESTPACKEDTYPES_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=86, + serialized_end=561, +) + + +_TESTUNPACKEDTYPES = _descriptor.Descriptor( + name='TestUnpackedTypes', + full_name='google.protobuf.python.internal.TestUnpackedTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='repeated_int32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_int32', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + 
_descriptor.FieldDescriptor( + name='repeated_int64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_int64', index=1, + number=2, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_uint32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_uint32', index=2, + number=3, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_uint64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_uint64', index=3, + number=4, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_sint32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sint32', index=4, + number=5, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_sint64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sint64', index=5, + number=6, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_fixed32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_fixed32', index=6, + number=7, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_fixed64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_fixed64', index=7, + number=8, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_sfixed32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sfixed32', index=8, + number=9, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_sfixed64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sfixed64', index=9, + number=10, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_float', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_float', index=10, + number=11, type=2, cpp_type=6, label=3, + 
has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_double', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_double', index=11, + number=12, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_bool', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_bool', index=12, + number=13, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_nested_enum', index=13, + number=14, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=564, + serialized_end=1056, +) + +_TESTPACKEDTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTPACKEDTYPES_NESTEDENUM +_TESTPACKEDTYPES_NESTEDENUM.containing_type = _TESTPACKEDTYPES +_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTPACKEDTYPES_NESTEDENUM 
+DESCRIPTOR.message_types_by_name['TestPackedTypes'] = _TESTPACKEDTYPES +DESCRIPTOR.message_types_by_name['TestUnpackedTypes'] = _TESTUNPACKEDTYPES + +TestPackedTypes = _reflection.GeneratedProtocolMessageType('TestPackedTypes', (_message.Message,), dict( + DESCRIPTOR = _TESTPACKEDTYPES, + __module__ = 'google.protobuf.internal.packed_field_test_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestPackedTypes) + )) +_sym_db.RegisterMessage(TestPackedTypes) + +TestUnpackedTypes = _reflection.GeneratedProtocolMessageType('TestUnpackedTypes', (_message.Message,), dict( + DESCRIPTOR = _TESTUNPACKEDTYPES, + __module__ = 'google.protobuf.internal.packed_field_test_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestUnpackedTypes) + )) +_sym_db.RegisterMessage(TestUnpackedTypes) + + +_TESTUNPACKEDTYPES.fields_by_name['repeated_int32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_int64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64'].has_options = True 
+_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_float'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_double'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_bool'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/proto_builder_test.py 
b/deps/google/protobuf/internal/proto_builder_test.py new file mode 100644 index 00000000..822ad895 --- /dev/null +++ b/deps/google/protobuf/internal/proto_builder_test.py @@ -0,0 +1,95 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Tests for google.protobuf.proto_builder.""" + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict #PY26 +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor_pool +from google.protobuf import proto_builder +from google.protobuf import text_format + + +class ProtoBuilderTest(unittest.TestCase): + + def setUp(self): + self.ordered_fields = OrderedDict([ + ('foo', descriptor_pb2.FieldDescriptorProto.TYPE_INT64), + ('bar', descriptor_pb2.FieldDescriptorProto.TYPE_STRING), + ]) + self._fields = dict(self.ordered_fields) + + def testMakeSimpleProtoClass(self): + """Test that we can create a proto class.""" + proto_cls = proto_builder.MakeSimpleProtoClass( + self._fields, + full_name='net.proto2.python.public.proto_builder_test.Test') + proto = proto_cls() + proto.foo = 12345 + proto.bar = 'asdf' + self.assertMultiLineEqual( + 'bar: "asdf"\nfoo: 12345\n', text_format.MessageToString(proto)) + + def testOrderedFields(self): + """Test that the field order is maintained when given an OrderedDict.""" + proto_cls = proto_builder.MakeSimpleProtoClass( + self.ordered_fields, + full_name='net.proto2.python.public.proto_builder_test.OrderedTest') + proto = proto_cls() + proto.foo = 12345 + proto.bar = 'asdf' + self.assertMultiLineEqual( + 'foo: 12345\nbar: "asdf"\n', text_format.MessageToString(proto)) + + def testMakeSameProtoClassTwice(self): + """Test that the DescriptorPool is used.""" + pool = descriptor_pool.DescriptorPool() + proto_cls1 = proto_builder.MakeSimpleProtoClass( + self._fields, + full_name='net.proto2.python.public.proto_builder_test.Test', + pool=pool) + proto_cls2 = proto_builder.MakeSimpleProtoClass( + self._fields, + full_name='net.proto2.python.public.proto_builder_test.Test', + pool=pool) + self.assertIs(proto_cls1.DESCRIPTOR, proto_cls2.DESCRIPTOR) + + +if __name__ == '__main__': + 
unittest.main() diff --git a/deps/google/protobuf/internal/python_message.py b/deps/google/protobuf/internal/python_message.py new file mode 100644 index 00000000..87f60666 --- /dev/null +++ b/deps/google/protobuf/internal/python_message.py @@ -0,0 +1,1520 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. 
+# +# TODO(robinson): Helpers for verbose, common checks like seeing if a +# descriptor's cpp_type is CPPTYPE_MESSAGE. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) + +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +from io import BytesIO +import sys +import struct +import weakref + +import six +import six.moves.copyreg as copyreg + +# We use "as" to avoid name collisions with variables. +from google.protobuf.internal import containers +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import enum_type_wrapper +from google.protobuf.internal import message_listener as message_listener_mod +from google.protobuf.internal import type_checkers +from google.protobuf.internal import well_known_types +from google.protobuf.internal import wire_format +from google.protobuf import descriptor as descriptor_mod +from google.protobuf import message as message_mod +from google.protobuf import symbol_database +from google.protobuf import text_format + +_FieldDescriptor = descriptor_mod.FieldDescriptor +_AnyFullTypeName = 'google.protobuf.Any' + + +class GeneratedProtocolMessageType(type): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + We add implementations for all methods described in the Message class. We + also create properties to allow getting/setting all fields in the protocol + message. 
Finally, we create slots to prevent users from accidentally + "setting" nonexistent fields in the protocol message, which then wouldn't get + serialized / deserialized properly. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + class MyProtoClass(Message): + __metaclass__ = GeneratedProtocolMessageType + DESCRIPTOR = mydescriptor + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + + The above example will not work for nested types. If you wish to include them, + use reflection.MakeClass() instead of manually instantiating the class in + order to create the appropriate class structure. + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __new__(cls, name, bases, dictionary): + """Custom allocation for runtime-generated class types. + + We override __new__ because this is apparently the only place + where we can meaningfully set __slots__ on the class we're creating(?). + (The interplay between metaclasses and slots is not very well-documented). + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + + Returns: + Newly-allocated class. 
+ """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + if descriptor.full_name in well_known_types.WKTBASES: + bases += (well_known_types.WKTBASES[descriptor.full_name],) + _AddClassAttributesForNestedExtensions(descriptor, dictionary) + _AddSlots(descriptor, dictionary) + + superclass = super(GeneratedProtocolMessageType, cls) + new_class = superclass.__new__(cls, name, bases, dictionary) + return new_class + + def __init__(cls, name, bases, dictionary): + """Here we perform the majority of our work on the class. + We add enum getters, an __init__ method, implementations + of all Message methods, and properties for all fields + in the protocol type. + + Args: + name: Name of the class (ignored, but required by the + metaclass protocol). + bases: Base classes of the class we're constructing. + (Should be message.Message). We ignore this field, but + it's required by the metaclass protocol + dictionary: The class dictionary of the class we're + constructing. dictionary[_DESCRIPTOR_KEY] must contain + a Descriptor object describing this protocol message + type. + """ + descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] + cls._decoders_by_tag = {} + cls._extensions_by_name = {} + cls._extensions_by_number = {} + if (descriptor.has_options and + descriptor.GetOptions().message_set_wire_format): + cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( + decoder.MessageSetItemDecoder(cls._extensions_by_number), None) + + # Attach stuff to each FieldDescriptor for quick lookup later on. 
+ for field in descriptor.fields: + _AttachFieldHelpers(cls, field) + + descriptor._concrete_class = cls # pylint: disable=protected-access + _AddEnumValues(descriptor, cls) + _AddInitMethod(descriptor, cls) + _AddPropertiesForFields(descriptor, cls) + _AddPropertiesForExtensions(descriptor, cls) + _AddStaticMethods(cls) + _AddMessageMethods(descriptor, cls) + _AddPrivateHelperMethods(descriptor, cls) + copyreg.pickle(cls, lambda obj: (cls, (), obj.__getstate__())) + + superclass = super(GeneratedProtocolMessageType, cls) + superclass.__init__(name, bases, dictionary) + + +# Stateless helpers for GeneratedProtocolMessageType below. +# Outside clients should not access these directly. +# +# I opted not to make any of these methods on the metaclass, to make it more +# clear that I'm not really using any state there and to keep clients from +# thinking that they have direct access to these construction helpers. + + +def _PropertyName(proto_field_name): + """Returns the name of the public property attribute which + clients can use to get and (in some cases) set the value + of a protocol message field. + + Args: + proto_field_name: The protocol message field name, exactly + as it appears (or would appear) in a .proto file. + """ + # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. + # nnorwitz makes my day by writing: + # """ + # FYI. See the keyword module in the stdlib. This could be as simple as: + # + # if keyword.iskeyword(proto_field_name): + # return proto_field_name + "_" + # return proto_field_name + # """ + # Kenton says: The above is a BAD IDEA. People rely on being able to use + # getattr() and setattr() to reflectively manipulate field values. If we + # rename the properties, then every such user has to also make sure to apply + # the same transformation. Note that currently if you name a field "yield", + # you can still access it just fine using getattr/setattr -- it's not even + # that cumbersome to do so. 
+ # TODO(kenton): Remove this method entirely if/when everyone agrees with my + # position. + return proto_field_name + + +def _VerifyExtensionHandle(message, extension_handle): + """Verify that the given extension handle is valid.""" + + if not isinstance(extension_handle, _FieldDescriptor): + raise KeyError('HasExtension() expects an extension handle, got: %s' % + extension_handle) + + if not extension_handle.is_extension: + raise KeyError('"%s" is not an extension.' % extension_handle.full_name) + + if not extension_handle.containing_type: + raise KeyError('"%s" is missing a containing_type.' + % extension_handle.full_name) + + if extension_handle.containing_type is not message.DESCRIPTOR: + raise KeyError('Extension "%s" extends message type "%s", but this ' + 'message is of type "%s".' % + (extension_handle.full_name, + extension_handle.containing_type.full_name, + message.DESCRIPTOR.full_name)) + + +def _AddSlots(message_descriptor, dictionary): + """Adds a __slots__ entry to dictionary, containing the names of all valid + attributes for this message type. + + Args: + message_descriptor: A Descriptor instance describing this message type. + dictionary: Class dictionary to which we'll add a '__slots__' entry. 
+ """ + dictionary['__slots__'] = ['_cached_byte_size', + '_cached_byte_size_dirty', + '_fields', + '_unknown_fields', + '_is_present_in_parent', + '_listener', + '_listener_for_children', + '__weakref__', + '_oneofs'] + + +def _IsMessageSetExtension(field): + return (field.is_extension and + field.containing_type.has_options and + field.containing_type.GetOptions().message_set_wire_format and + field.type == _FieldDescriptor.TYPE_MESSAGE and + field.label == _FieldDescriptor.LABEL_OPTIONAL) + + +def _IsMapField(field): + return (field.type == _FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def _IsMessageMapField(field): + value_type = field.message_type.fields_by_name["value"] + return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE + + +def _AttachFieldHelpers(cls, field_descriptor): + is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) + is_packable = (is_repeated and + wire_format.IsTypePackable(field_descriptor.type)) + if not is_packable: + is_packed = False + elif field_descriptor.containing_type.syntax == "proto2": + is_packed = (field_descriptor.has_options and + field_descriptor.GetOptions().packed) + else: + has_packed_false = (field_descriptor.has_options and + field_descriptor.GetOptions().HasField("packed") and + field_descriptor.GetOptions().packed == False) + is_packed = not has_packed_false + is_map_entry = _IsMapField(field_descriptor) + + if is_map_entry: + field_encoder = encoder.MapEncoder(field_descriptor) + sizer = encoder.MapSizer(field_descriptor) + elif _IsMessageSetExtension(field_descriptor): + field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) + sizer = encoder.MessageSetItemSizer(field_descriptor.number) + else: + field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( + field_descriptor.number, is_repeated, is_packed) + sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type]( + 
field_descriptor.number, is_repeated, is_packed) + + field_descriptor._encoder = field_encoder + field_descriptor._sizer = sizer + field_descriptor._default_constructor = _DefaultValueConstructorForField( + field_descriptor) + + def AddDecoder(wiretype, is_packed): + tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) + decode_type = field_descriptor.type + if (decode_type == _FieldDescriptor.TYPE_ENUM and + type_checkers.SupportsOpenEnums(field_descriptor)): + decode_type = _FieldDescriptor.TYPE_INT32 + + oneof_descriptor = None + if field_descriptor.containing_oneof is not None: + oneof_descriptor = field_descriptor + + if is_map_entry: + is_message_map = _IsMessageMapField(field_descriptor) + + field_decoder = decoder.MapDecoder( + field_descriptor, _GetInitializeDefaultForMap(field_descriptor), + is_message_map) + else: + field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( + field_descriptor.number, is_repeated, is_packed, + field_descriptor, field_descriptor._default_constructor) + + cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) + + AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], + False) + + if is_repeated and wire_format.IsTypePackable(field_descriptor.type): + # To support wire compatibility of adding packed = true, add a decoder for + # packed values regardless of the field's options. + AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) + + +def _AddClassAttributesForNestedExtensions(descriptor, dictionary): + extension_dict = descriptor.extensions_by_name + for extension_name, extension_field in extension_dict.items(): + assert extension_name not in dictionary + dictionary[extension_name] = extension_field + + +def _AddEnumValues(descriptor, cls): + """Sets class-level attributes for all enum fields defined in this message. + + Also exporting a class-level object that can name enum values. + + Args: + descriptor: Descriptor object for this message type. 
+ cls: Class we're constructing for this message type. + """ + for enum_type in descriptor.enum_types: + setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) + for enum_value in enum_type.values: + setattr(cls, enum_value.name, enum_value.number) + + +def _GetInitializeDefaultForMap(field): + if field.label != _FieldDescriptor.LABEL_REPEATED: + raise ValueError('map_entry set on non-repeated field %s' % ( + field.name)) + fields_by_name = field.message_type.fields_by_name + key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) + + value_field = fields_by_name['value'] + if _IsMessageMapField(field): + def MakeMessageMapDefault(message): + return containers.MessageMap( + message._listener_for_children, value_field.message_type, key_checker) + return MakeMessageMapDefault + else: + value_checker = type_checkers.GetTypeChecker(value_field) + def MakePrimitiveMapDefault(message): + return containers.ScalarMap( + message._listener_for_children, key_checker, value_checker) + return MakePrimitiveMapDefault + +def _DefaultValueConstructorForField(field): + """Returns a function which returns a default value for a field. + + Args: + field: FieldDescriptor object for this field. + + The returned function has one argument: + message: Message instance containing this field, or a weakref proxy + of same. + + That function in turn returns a default value for this field. The default + value may refer back to |message| via a weak reference. + """ + + if _IsMapField(field): + return _GetInitializeDefaultForMap(field) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + if field.has_default_value and field.default_value != []: + raise ValueError('Repeated field default value not empty list: %s' % ( + field.default_value)) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # We can't look at _concrete_class yet since it might not have + # been set. (Depends on order in which we initialize the classes). 
+ message_type = field.message_type + def MakeRepeatedMessageDefault(message): + return containers.RepeatedCompositeFieldContainer( + message._listener_for_children, field.message_type) + return MakeRepeatedMessageDefault + else: + type_checker = type_checkers.GetTypeChecker(field) + def MakeRepeatedScalarDefault(message): + return containers.RepeatedScalarFieldContainer( + message._listener_for_children, type_checker) + return MakeRepeatedScalarDefault + + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + # _concrete_class may not yet be initialized. + message_type = field.message_type + def MakeSubMessageDefault(message): + result = message_type._concrete_class() + result._SetListener( + _OneofListener(message, field) + if field.containing_oneof is not None + else message._listener_for_children) + return result + return MakeSubMessageDefault + + def MakeScalarDefault(message): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return field.default_value + return MakeScalarDefault + + +def _ReraiseTypeErrorWithFieldName(message_name, field_name): + """Re-raise the currently-handled TypeError with the field name added.""" + exc = sys.exc_info()[1] + if len(exc.args) == 1 and type(exc) is TypeError: + # simple TypeError; add field name to exception message + exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) + + # re-raise possibly-amended exception with original traceback: + six.reraise(type(exc), exc, sys.exc_info()[2]) + + +def _AddInitMethod(message_descriptor, cls): + """Adds an __init__ method to cls.""" + + def _GetIntegerEnumValue(enum_type, value): + """Convert a string or integer enum value to an integer. + + If the value is a string, it is converted to the enum value in + enum_type with the same name. If the value is not a string, it's + returned as-is. (No conversion or bounds-checking is done.) 
+ """ + if isinstance(value, six.string_types): + try: + return enum_type.values_by_name[value].number + except KeyError: + raise ValueError('Enum type %s: unknown label "%s"' % ( + enum_type.full_name, value)) + return value + + def init(self, **kwargs): + self._cached_byte_size = 0 + self._cached_byte_size_dirty = len(kwargs) > 0 + self._fields = {} + # Contains a mapping from oneof field descriptors to the descriptor + # of the currently set field in that oneof field. + self._oneofs = {} + + # _unknown_fields is () when empty for efficiency, and will be turned into + # a list if fields are added. + self._unknown_fields = () + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) + for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError("%s() got an unexpected keyword argument '%s'" % + (message_descriptor.name, field_name)) + if field.label == _FieldDescriptor.LABEL_REPEATED: + copy = field._default_constructor(self) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite + if _IsMapField(field): + if _IsMessageMapField(field): + for key in field_value: + copy[key].MergeFrom(field_value[key]) + else: + copy.update(field_value) + else: + for val in field_value: + if isinstance(val, dict): + copy.add(**val) + else: + copy.add().MergeFrom(val) + else: # Scalar + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = [_GetIntegerEnumValue(field.enum_type, val) + for val in field_value] + copy.extend(field_value) + self._fields[field] = copy + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + copy = field._default_constructor(self) + new_val = field_value + if isinstance(field_value, dict): + new_val = field.message_type._concrete_class(**field_value) + try: + copy.MergeFrom(new_val) + except TypeError: + 
_ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + self._fields[field] = copy + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = _GetIntegerEnumValue(field.enum_type, field_value) + try: + setattr(self, field_name, field_value) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + + init.__module__ = None + init.__doc__ = None + cls.__init__ = init + + +def _GetFieldByName(message_descriptor, field_name): + """Returns a field descriptor by field name. + + Args: + message_descriptor: A Descriptor describing all fields in message. + field_name: The name of the field to retrieve. + Returns: + The field descriptor associated with the field name. + """ + try: + return message_descriptor.fields_by_name[field_name] + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + +def _AddPropertiesForFields(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + for field in descriptor.fields: + _AddPropertiesForField(field, cls) + + if descriptor.is_extendable: + # _ExtensionDict is just an adaptor with no state so we allocate a new one + # every time it is accessed. + cls.Extensions = property(lambda self: _ExtensionDict(self)) + + +def _AddPropertiesForField(field, cls): + """Adds a public property for a protocol message field. + Clients can use this property to get and (in the case + of non-repeated scalar fields) directly set the value + of a protocol message field. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # Catch it if we add other types that we should + # handle specially here. 
+ assert _FieldDescriptor.MAX_CPPTYPE == 10 + + constant_name = field.name.upper() + "_FIELD_NUMBER" + setattr(cls, constant_name, field.number) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + _AddPropertiesForRepeatedField(field, cls) + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + _AddPropertiesForNonRepeatedCompositeField(field, cls) + else: + _AddPropertiesForNonRepeatedScalarField(field, cls) + + +def _AddPropertiesForRepeatedField(field, cls): + """Adds a public property for a "repeated" protocol message field. Clients + can use this property to get the value of the field, which will be either a + _RepeatedScalarFieldContainer or _RepeatedCompositeFieldContainer (see + below). + + Note that when clients add values to these containers, we perform + type-checking in the case of repeated scalar fields, and we also set any + necessary "has" bits as a side-effect. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. 
+ def setter(self, new_value): + raise AttributeError('Assignment not allowed to repeated field ' + '"%s" in protocol message object.' % proto_field_name) + + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, property(getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedScalarField(field, cls): + """Adds a public property for a nonrepeated, scalar protocol message field. + Clients can use this property to get and directly set the value of the field. + Note that when the client sets the value of a field by using this property, + all necessary "has" bits are set as a side-effect, and we also perform + type-checking. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + type_checker = type_checkers.GetTypeChecker(field) + default_value = field.default_value + valid_values = set() + is_proto3 = field.containing_type.syntax == "proto3" + + def getter(self): + # TODO(protobuf-team): This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return self._fields.get(field, default_value) + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + clear_when_set_to_default = is_proto3 and not field.containing_oneof + + def field_setter(self, new_value): + # pylint: disable=protected-access + # Testing the value for truthiness captures all of the proto3 defaults + # (0, 0.0, enum 0, and False). + new_value = type_checker.CheckValue(new_value) + if clear_when_set_to_default and not new_value: + self._fields.pop(field, None) + else: + self._fields[field] = new_value + # Check _cached_byte_size_dirty inline to improve performance, since scalar + # setters are called frequently. 
+ if not self._cached_byte_size_dirty: + self._Modified() + + if field.containing_oneof: + def setter(self, new_value): + field_setter(self, new_value) + self._UpdateOneofState(field) + else: + setter = field_setter + + setter.__module__ = None + setter.__doc__ = 'Setter for %s.' % proto_field_name + + # Add a property to encapsulate the getter/setter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, property(getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedCompositeField(field, cls): + """Adds a public property for a nonrepeated, composite protocol message field. + A composite field is a "group" or "message" field. + + Clients can use this property to get the value of the field, but cannot + assign to the property directly. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # TODO(robinson): Remove duplication with similar method + # for non-repeated scalars. + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. 
+ def setter(self, new_value): + raise AttributeError('Assignment not allowed to composite field ' + '"%s" in protocol message object.' % proto_field_name) + + # Add a property to encapsulate the getter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, property(getter, setter, doc=doc)) + + +def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extension_dict = descriptor.extensions_by_name + for extension_name, extension_field in extension_dict.items(): + constant_name = extension_name.upper() + "_FIELD_NUMBER" + setattr(cls, constant_name, extension_field.number) + + +def _AddStaticMethods(cls): + # TODO(robinson): This probably needs to be thread-safe(?) + def RegisterExtension(extension_handle): + extension_handle.containing_type = cls.DESCRIPTOR + _AttachFieldHelpers(cls, extension_handle) + + # Try to insert our extension, failing if an extension with the same number + # already exists. + actual_handle = cls._extensions_by_number.setdefault( + extension_handle.number, extension_handle) + if actual_handle is not extension_handle: + raise AssertionError( + 'Extensions "%s" and "%s" both try to extend message type "%s" with ' + 'field number %d.' % + (extension_handle.full_name, actual_handle.full_name, + cls.DESCRIPTOR.full_name, extension_handle.number)) + + cls._extensions_by_name[extension_handle.full_name] = extension_handle + + handle = extension_handle # avoid line wrapping + if _IsMessageSetExtension(handle): + # MessageSet extension. Also register under type name. 
+ cls._extensions_by_name[ + extension_handle.message_type.full_name] = extension_handle + + cls.RegisterExtension = staticmethod(RegisterExtension) + + def FromString(s): + message = cls() + message.MergeFromString(s) + return message + cls.FromString = staticmethod(FromString) + + +def _IsPresent(item): + """Given a (FieldDescriptor, value) tuple from _fields, return true if the + value should be included in the list returned by ListFields().""" + + if item[0].label == _FieldDescriptor.LABEL_REPEATED: + return bool(item[1]) + elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + return item[1]._is_present_in_parent + else: + return True + + +def _AddListFieldsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ListFields(self): + all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + + cls.ListFields = ListFields + +_Proto3HasError = 'Protocol message has no non-repeated submessage field "%s"' +_Proto2HasError = 'Protocol message has no non-repeated field "%s"' + +def _AddHasFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + is_proto3 = (message_descriptor.syntax == "proto3") + error_msg = _Proto3HasError if is_proto3 else _Proto2HasError + + hassable_fields = {} + for field in message_descriptor.fields: + if field.label == _FieldDescriptor.LABEL_REPEATED: + continue + # For proto3, only submessages and fields inside a oneof have presence. + if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and + not field.containing_oneof): + continue + hassable_fields[field.name] = field + + if not is_proto3: + # Fields inside oneofs are never repeated (enforced by the compiler). 
+ for oneof in message_descriptor.oneofs: + hassable_fields[oneof.name] = oneof + + def HasField(self, field_name): + try: + field = hassable_fields[field_name] + except KeyError: + raise ValueError(error_msg % field_name) + + if isinstance(field, descriptor_mod.OneofDescriptor): + try: + return HasField(self, self._oneofs[field].name) + except KeyError: + return False + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field) + return value is not None and value._is_present_in_parent + else: + return field in self._fields + + cls.HasField = HasField + + +def _AddClearFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def ClearField(self, field_name): + try: + field = message_descriptor.fields_by_name[field_name] + except KeyError: + try: + field = message_descriptor.oneofs_by_name[field_name] + if field in self._oneofs: + field = self._oneofs[field] + else: + return + except KeyError: + raise ValueError('Protocol message %s() has no "%s" field.' % + (message_descriptor.name, field_name)) + + if field in self._fields: + # To match the C++ implementation, we need to invalidate iterators + # for map fields when ClearField() happens. + if hasattr(self._fields[field], 'InvalidateIterators'): + self._fields[field].InvalidateIterators() + + # Note: If the field is a sub-message, its listener will still point + # at us. That's fine, because the worst than can happen is that it + # will call _Modified() and invalidate our byte size. Big deal. + del self._fields[field] + + if self._oneofs.get(field.containing_oneof, None) is field: + del self._oneofs[field.containing_oneof] + + # Always call _Modified() -- even if nothing was changed, this is + # a mutating method, and thus calling it should cause the field to become + # present in the parent message. 
+ self._Modified() + + cls.ClearField = ClearField + + +def _AddClearExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def ClearExtension(self, extension_handle): + _VerifyExtensionHandle(self, extension_handle) + + # Similar to ClearField(), above. + if extension_handle in self._fields: + del self._fields[extension_handle] + self._Modified() + cls.ClearExtension = ClearExtension + + +def _AddClearMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def Clear(self): + # Clear fields. + self._fields = {} + self._unknown_fields = () + self._oneofs = {} + self._Modified() + cls.Clear = Clear + + +def _AddHasExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def HasExtension(self, extension_handle): + _VerifyExtensionHandle(self, extension_handle) + if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: + raise KeyError('"%s" is repeated.' % extension_handle.full_name) + + if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(extension_handle) + return value is not None and value._is_present_in_parent + else: + return extension_handle in self._fields + cls.HasExtension = HasExtension + +def _InternalUnpackAny(msg): + """Unpacks Any message and returns the unpacked message. + + This internal method is differnt from public Any Unpack method which takes + the target message as argument. _InternalUnpackAny method does not have + target message type and need to find the message type in descriptor pool. + + Args: + msg: An Any message to be unpacked. + + Returns: + The unpacked message. + """ + type_url = msg.type_url + db = symbol_database.Default() + + if not type_url: + return None + + # TODO(haberman): For now we just strip the hostname. Better logic will be + # required. 
+ type_name = type_url.split("/")[-1] + descriptor = db.pool.FindMessageTypeByName(type_name) + + if descriptor is None: + return None + + message_class = db.GetPrototype(descriptor) + message = message_class() + + message.ParseFromString(msg.value) + return message + +def _AddEqualsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __eq__(self, other): + if (not isinstance(other, message_mod.Message) or + other.DESCRIPTOR != self.DESCRIPTOR): + return False + + if self is other: + return True + + if self.DESCRIPTOR.full_name == _AnyFullTypeName: + any_a = _InternalUnpackAny(self) + any_b = _InternalUnpackAny(other) + if any_a and any_b: + return any_a == any_b + + if not self.ListFields() == other.ListFields(): + return False + + # Sort unknown fields because their order shouldn't affect equality test. + unknown_fields = list(self._unknown_fields) + unknown_fields.sort() + other_unknown_fields = list(other._unknown_fields) + other_unknown_fields.sort() + + return unknown_fields == other_unknown_fields + + cls.__eq__ = __eq__ + + +def _AddStrMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __str__(self): + return text_format.MessageToString(self) + cls.__str__ = __str__ + + +def _AddReprMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __repr__(self): + return text_format.MessageToString(self) + cls.__repr__ = __repr__ + + +def _AddUnicodeMethod(unused_message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def __unicode__(self): + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + cls.__unicode__ = __unicode__ + + +def _AddSetListenerMethod(cls): + """Helper for _AddMessageMethods().""" + def SetListener(self, listener): + if listener is None: + self._listener = message_listener_mod.NullMessageListener() + else: + self._listener = listener + cls._SetListener = SetListener + + +def _BytesForNonRepeatedElement(value, field_number, 
field_type): + """Returns the number of bytes needed to serialize a non-repeated element. + The returned byte count includes space for tag information and any + other additional space associated with serializing value. + + Args: + value: Value we're serializing. + field_number: Field number of this value. (Since the field number + is stored as part of a varint-encoded tag, this has an impact + on the total bytes required to serialize the value). + field_type: The type of the field. One of the TYPE_* constants + within FieldDescriptor. + """ + try: + fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] + return fn(field_number, value) + except KeyError: + raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) + + +def _AddByteSizeMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ByteSize(self): + if not self._cached_byte_size_dirty: + return self._cached_byte_size + + size = 0 + for field_descriptor, field_value in self.ListFields(): + size += field_descriptor._sizer(field_value) + + for tag_bytes, value_bytes in self._unknown_fields: + size += len(tag_bytes) + len(value_bytes) + + self._cached_byte_size = size + self._cached_byte_size_dirty = False + self._listener_for_children.dirty = False + return size + + cls.ByteSize = ByteSize + + +def _AddSerializeToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializeToString(self): + # Check if the message has all of its required fields set. 
+ errors = [] + if not self.IsInitialized(): + raise message_mod.EncodeError( + 'Message %s is missing required fields: %s' % ( + self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) + return self.SerializePartialToString() + cls.SerializeToString = SerializeToString + + +def _AddSerializePartialToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializePartialToString(self): + out = BytesIO() + self._InternalSerialize(out.write) + return out.getvalue() + cls.SerializePartialToString = SerializePartialToString + + def InternalSerialize(self, write_bytes): + for field_descriptor, field_value in self.ListFields(): + field_descriptor._encoder(write_bytes, field_value) + for tag_bytes, value_bytes in self._unknown_fields: + write_bytes(tag_bytes) + write_bytes(value_bytes) + cls._InternalSerialize = InternalSerialize + + +def _AddMergeFromStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def MergeFromString(self, serialized): + length = len(serialized) + try: + if self._InternalParse(serialized, 0, length) != length: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise message_mod.DecodeError('Unexpected end-group tag.') + except (IndexError, TypeError): + # Now ord(buf[p:p+1]) == ord('') gets TypeError. + raise message_mod.DecodeError('Truncated message.') + except struct.error as e: + raise message_mod.DecodeError(e) + return length # Return this for legacy reasons. 
+ cls.MergeFromString = MergeFromString + + local_ReadTag = decoder.ReadTag + local_SkipField = decoder.SkipField + decoders_by_tag = cls._decoders_by_tag + is_proto3 = message_descriptor.syntax == "proto3" + + def InternalParse(self, buffer, pos, end): + self._Modified() + field_dict = self._fields + unknown_field_list = self._unknown_fields + while pos != end: + (tag_bytes, new_pos) = local_ReadTag(buffer, pos) + field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) + if field_decoder is None: + value_start_pos = new_pos + new_pos = local_SkipField(buffer, new_pos, end, tag_bytes) + if new_pos == -1: + return pos + if not is_proto3: + if not unknown_field_list: + unknown_field_list = self._unknown_fields = [] + unknown_field_list.append( + (tag_bytes, buffer[value_start_pos:new_pos])) + pos = new_pos + else: + pos = field_decoder(buffer, new_pos, end, self, field_dict) + if field_desc: + self._UpdateOneofState(field_desc) + return pos + cls._InternalParse = InternalParse + + +def _AddIsInitializedMethod(message_descriptor, cls): + """Adds the IsInitialized and FindInitializationError methods to the + protocol message class.""" + + required_fields = [field for field in message_descriptor.fields + if field.label == _FieldDescriptor.LABEL_REQUIRED] + + def IsInitialized(self, errors=None): + """Checks if all required fields of a message are set. + + Args: + errors: A list which, if provided, will be populated with the field + paths of all missing required fields. + + Returns: + True iff the specified message has all required fields set. + """ + + # Performance is critical so we avoid HasField() and ListFields(). 
+ + for field in required_fields: + if (field not in self._fields or + (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and + not self._fields[field]._is_present_in_parent)): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + for field, value in list(self._fields.items()): # dict can change size! + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.label == _FieldDescriptor.LABEL_REPEATED: + if (field.message_type.has_options and + field.message_type.GetOptions().map_entry): + continue + for element in value: + if not element.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + elif value._is_present_in_parent and not value.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + return True + + cls.IsInitialized = IsInitialized + + def FindInitializationErrors(self): + """Finds required fields which are not initialized. + + Returns: + A list of strings. Each string is a path to an uninitialized field from + the top-level message, e.g. "foo.bar[5].baz". + """ + + errors = [] # simplify things + + for field in required_fields: + if not self.HasField(field.name): + errors.append(field.name) + + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + name = "(%s)" % field.full_name + else: + name = field.name + + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + element = value[key] + prefix = "%s[%s]." % (name, key) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + # ScalarMaps can't have any initialization errors. + pass + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for i in range(len(value)): + element = value[i] + prefix = "%s[%d]." 
% (name, i) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + prefix = name + "." + sub_errors = value.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + + return errors + + cls.FindInitializationErrors = FindInitializationErrors + + +def _AddMergeFromMethod(cls): + LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED + CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE + + def MergeFrom(self, msg): + if not isinstance(msg, cls): + raise TypeError( + "Parameter to MergeFrom() must be instance of same class: " + "expected %s got %s." % (cls.__name__, type(msg).__name__)) + + assert msg is not self + self._Modified() + + fields = self._fields + + for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + elif field.cpp_type == CPPTYPE_MESSAGE: + if value._is_present_in_parent: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + else: + self._fields[field] = value + if field.containing_oneof: + self._UpdateOneofState(field) + + if msg._unknown_fields: + if not self._unknown_fields: + self._unknown_fields = [] + self._unknown_fields.extend(msg._unknown_fields) + + cls.MergeFrom = MergeFrom + + +def _AddWhichOneofMethod(message_descriptor, cls): + def WhichOneof(self, oneof_name): + """Returns the name of the currently set field inside a oneof, or None.""" + try: + field = message_descriptor.oneofs_by_name[oneof_name] + except KeyError: + raise ValueError( + 'Protocol message has no oneof "%s" field.' 
% oneof_name) + + nested_field = self._oneofs.get(field, None) + if nested_field is not None and self.HasField(nested_field.name): + return nested_field.name + else: + return None + + cls.WhichOneof = WhichOneof + + +def _AddMessageMethods(message_descriptor, cls): + """Adds implementations of all Message methods to cls.""" + _AddListFieldsMethod(message_descriptor, cls) + _AddHasFieldMethod(message_descriptor, cls) + _AddClearFieldMethod(message_descriptor, cls) + if message_descriptor.is_extendable: + _AddClearExtensionMethod(cls) + _AddHasExtensionMethod(cls) + _AddClearMethod(message_descriptor, cls) + _AddEqualsMethod(message_descriptor, cls) + _AddStrMethod(message_descriptor, cls) + _AddReprMethod(message_descriptor, cls) + _AddUnicodeMethod(message_descriptor, cls) + _AddSetListenerMethod(cls) + _AddByteSizeMethod(message_descriptor, cls) + _AddSerializeToStringMethod(message_descriptor, cls) + _AddSerializePartialToStringMethod(message_descriptor, cls) + _AddMergeFromStringMethod(message_descriptor, cls) + _AddIsInitializedMethod(message_descriptor, cls) + _AddMergeFromMethod(cls) + _AddWhichOneofMethod(message_descriptor, cls) + + +def _AddPrivateHelperMethods(message_descriptor, cls): + """Adds implementation of private helper methods to cls.""" + + def Modified(self): + """Sets the _cached_byte_size_dirty bit to true, + and propagates this to our listener iff this was a state change. + """ + + # Note: Some callers check _cached_byte_size_dirty before calling + # _Modified() as an extra optimization. So, if this method is ever + # changed such that it does stuff even when _cached_byte_size_dirty is + # already true, the callers need to be updated. + if not self._cached_byte_size_dirty: + self._cached_byte_size_dirty = True + self._listener_for_children.dirty = True + self._is_present_in_parent = True + self._listener.Modified() + + def _UpdateOneofState(self, field): + """Sets field as the active field in its containing oneof. 

    Will also delete currently active field in the oneof, if it is different
    from the argument. Does not mark the message as modified.
    """
    other_field = self._oneofs.setdefault(field.containing_oneof, field)
    if other_field is not field:
      # A different member of this oneof was previously set; clear it so at
      # most one member of the oneof is ever present.
      del self._fields[other_field]
      self._oneofs[field.containing_oneof] = field

  cls._Modified = Modified
  # SetInParent is the public alias of _Modified for explicit presence.
  cls.SetInParent = Modified
  cls._UpdateOneofState = _UpdateOneofState


class _Listener(object):

  """MessageListener implementation that a parent message registers with its
  child message.

  In order to support semantics like:

    foo.bar.baz.qux = 23
    assert foo.HasField('bar')

  ...child objects must have back references to their parents.
  This helper class is at the heart of this support.
  """

  def __init__(self, parent_message):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
    """
    # This listener establishes a back reference from a child (contained) object
    # to its parent (containing) object.  We make this a weak reference to avoid
    # creating cyclic garbage when the client finishes with the 'parent' object
    # in the tree.
    if isinstance(parent_message, weakref.ProxyType):
      self._parent_message_weakref = parent_message
    else:
      self._parent_message_weakref = weakref.proxy(parent_message)

    # As an optimization, we also indicate directly on the listener whether
    # or not the parent message is dirty.  This way we can avoid traversing
    # up the tree in the common case.
    self.dirty = False

  def Modified(self):
    if self.dirty:
      return
    try:
      # Propagate the signal to our parents iff this is the first field set.
      self._parent_message_weakref._Modified()
    except ReferenceError:
      # We can get here if a client has kept a reference to a child object,
      # and is now setting a field on it, but the child's parent has been
      # garbage-collected.  This is not an error.
      pass


class _OneofListener(_Listener):
  """Special listener implementation for setting composite oneof fields."""

  def __init__(self, parent_message, field):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
      field: The descriptor of the field being set in the parent message.
    """
    super(_OneofListener, self).__init__(parent_message)
    self._field = field

  def Modified(self):
    """Also updates the state of the containing oneof in the parent message."""
    try:
      # Record this field as the active oneof member before propagating the
      # dirty signal up the tree.
      self._parent_message_weakref._UpdateOneofState(self._field)
      super(_OneofListener, self).Modified()
    except ReferenceError:
      # Parent was garbage-collected; nothing to update (see _Listener).
      pass


# TODO(robinson): Move elsewhere?  This file is getting pretty ridiculous...
# TODO(robinson): Unify error handling of "unknown extension" crap.
# TODO(robinson): Support iteritems()-style iteration over all
# extensions with the "has" bits turned on?
class _ExtensionDict(object):

  """Dict-like container for supporting an indexable "Extensions"
  field on proto instances.

  Note that in all cases we expect extension handles to be
  FieldDescriptors.
  """

  def __init__(self, extended_message):
    """extended_message: Message instance for which we are the Extensions dict.
+ """ + + self._extended_message = extended_message + + def __getitem__(self, extension_handle): + """Returns the current value of the given extension handle.""" + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + result = self._extended_message._fields.get(extension_handle) + if result is not None: + return result + + if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: + result = extension_handle._default_constructor(self._extended_message) + elif extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + result = extension_handle.message_type._concrete_class() + try: + result._SetListener(self._extended_message._listener_for_children) + except ReferenceError: + pass + else: + # Singular scalar -- just return the default without inserting into the + # dict. + return extension_handle.default_value + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + result = self._extended_message._fields.setdefault( + extension_handle, result) + + return result + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + + my_fields = self._extended_message.ListFields() + other_fields = other._extended_message.ListFields() + + # Get rid of non-extension fields. + my_fields = [ field for field in my_fields if field.is_extension ] + other_fields = [ field for field in other_fields if field.is_extension ] + + return my_fields == other_fields + + def __ne__(self, other): + return not self == other + + def __hash__(self): + raise TypeError('unhashable object') + + # Note that this is only meaningful for non-repeated, scalar extension + # fields. 
  # Note also that we may have to call _Modified() when we do
  # successfully set a field this way, to set any necessary "has" bits in the
  # ancestors of the extended message.
  def __setitem__(self, extension_handle, value):
    """If extension_handle specifies a non-repeated, scalar extension
    field, sets the value of that field.
    """

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if (extension_handle.label == _FieldDescriptor.LABEL_REPEATED or
        extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
      raise TypeError(
          'Cannot assign to extension "%s" because it is a repeated or '
          'composite type.' % extension_handle.full_name)

    # It's slightly wasteful to lookup the type checker each time,
    # but we expect this to be a vanishingly uncommon case anyway.
    type_checker = type_checkers.GetTypeChecker(extension_handle)
    # pylint: disable=protected-access
    self._extended_message._fields[extension_handle] = (
        type_checker.CheckValue(value))
    self._extended_message._Modified()

  def _FindExtensionByName(self, name):
    """Tries to find a known extension with the specified name.

    Args:
      name: Extension full name.

    Returns:
      Extension field descriptor.
    """
    return self._extended_message._extensions_by_name.get(name, None)
diff --git a/deps/google/protobuf/internal/reflection_test.py b/deps/google/protobuf/internal/reflection_test.py
new file mode 100644
index 00000000..752f2f5d
--- /dev/null
+++ b/deps/google/protobuf/internal/reflection_test.py
@@ -0,0 +1,2976 @@
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unittest for reflection.py, which also indirectly tests the output of the +pure-Python protocol compiler. 
+""" + +import copy +import gc +import operator +import six +import struct + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf import unittest_import_pb2 +from google.protobuf import unittest_mset_pb2 +from google.protobuf import unittest_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import descriptor +from google.protobuf import message +from google.protobuf import reflection +from google.protobuf import text_format +from google.protobuf.internal import api_implementation +from google.protobuf.internal import more_extensions_pb2 +from google.protobuf.internal import more_messages_pb2 +from google.protobuf.internal import message_set_extensions_pb2 +from google.protobuf.internal import wire_format +from google.protobuf.internal import test_util +from google.protobuf.internal import decoder + + +class _MiniDecoder(object): + """Decodes a stream of values from a string. + + Once upon a time we actually had a class called decoder.Decoder. Then we + got rid of it during a redesign that made decoding much, much faster overall. + But a couple tests in this file used it to check that the serialized form of + a message was correct. So, this class implements just the methods that were + used by said tests, so that we don't have to rewrite the tests. 
+ """ + + def __init__(self, bytes): + self._bytes = bytes + self._pos = 0 + + def ReadVarint(self): + result, self._pos = decoder._DecodeVarint(self._bytes, self._pos) + return result + + ReadInt32 = ReadVarint + ReadInt64 = ReadVarint + ReadUInt32 = ReadVarint + ReadUInt64 = ReadVarint + + def ReadSInt64(self): + return wire_format.ZigZagDecode(self.ReadVarint()) + + ReadSInt32 = ReadSInt64 + + def ReadFieldNumberAndWireType(self): + return wire_format.UnpackTag(self.ReadVarint()) + + def ReadFloat(self): + result = struct.unpack("\n\ndescriptor\x12%.protobuf_unittest.TestBadIdentifiers\x18\x65 \x01(\t:\x03\x62\x61r:>\n\nreflection\x12%.protobuf_unittest.TestBadIdentifiers\x18\x66 \x01(\t:\x03\x62\x61z:;\n\x07service\x12%.protobuf_unittest.TestBadIdentifiers\x18g \x01(\t:\x03quxB\x03\x90\x01\x01') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +MESSAGE_FIELD_NUMBER = 100 +message = _descriptor.FieldDescriptor( + name='message', full_name='protobuf_unittest.message', index=0, + number=100, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("foo").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DESCRIPTOR_FIELD_NUMBER = 101 +descriptor = _descriptor.FieldDescriptor( + name='descriptor', full_name='protobuf_unittest.descriptor', index=1, + number=101, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("bar").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REFLECTION_FIELD_NUMBER = 102 +reflection = _descriptor.FieldDescriptor( + name='reflection', full_name='protobuf_unittest.reflection', index=2, + number=102, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("baz").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +SERVICE_FIELD_NUMBER = 103 
+service = _descriptor.FieldDescriptor( + name='service', full_name='protobuf_unittest.service', index=3, + number=103, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("qux").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + + +_TESTBADIDENTIFIERS = _descriptor.Descriptor( + name='TestBadIdentifiers', + full_name='protobuf_unittest.TestBadIdentifiers', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(100, 536870912), ], + oneofs=[ + ], + serialized_start=74, + serialized_end=104, +) + + +_ANOTHERMESSAGE = _descriptor.Descriptor( + name='AnotherMessage', + full_name='protobuf_unittest.AnotherMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=106, + serialized_end=122, +) + +DESCRIPTOR.message_types_by_name['TestBadIdentifiers'] = _TESTBADIDENTIFIERS +DESCRIPTOR.message_types_by_name['AnotherMessage'] = _ANOTHERMESSAGE +DESCRIPTOR.extensions_by_name['message'] = message +DESCRIPTOR.extensions_by_name['descriptor'] = descriptor +DESCRIPTOR.extensions_by_name['reflection'] = reflection +DESCRIPTOR.extensions_by_name['service'] = service + +TestBadIdentifiers = _reflection.GeneratedProtocolMessageType('TestBadIdentifiers', (_message.Message,), dict( + DESCRIPTOR = _TESTBADIDENTIFIERS, + __module__ = 'google.protobuf.internal.test_bad_identifiers_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestBadIdentifiers) + )) +_sym_db.RegisterMessage(TestBadIdentifiers) + +AnotherMessage = _reflection.GeneratedProtocolMessageType('AnotherMessage', (_message.Message,), dict( + DESCRIPTOR 
= _ANOTHERMESSAGE, + __module__ = 'google.protobuf.internal.test_bad_identifiers_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.AnotherMessage) + )) +_sym_db.RegisterMessage(AnotherMessage) + +TestBadIdentifiers.RegisterExtension(message) +TestBadIdentifiers.RegisterExtension(descriptor) +TestBadIdentifiers.RegisterExtension(reflection) +TestBadIdentifiers.RegisterExtension(service) + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\220\001\001')) + +_ANOTHERSERVICE = _descriptor.ServiceDescriptor( + name='AnotherService', + full_name='protobuf_unittest.AnotherService', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=124, + serialized_end=140, + methods=[ +]) + +AnotherService = service_reflection.GeneratedServiceType('AnotherService', (_service.Service,), dict( + DESCRIPTOR = _ANOTHERSERVICE, + __module__ = 'google.protobuf.internal.test_bad_identifiers_pb2' + )) + +AnotherService_Stub = service_reflection.GeneratedServiceStubType('AnotherService_Stub', (AnotherService,), dict( + DESCRIPTOR = _ANOTHERSERVICE, + __module__ = 'google.protobuf.internal.test_bad_identifiers_pb2' + )) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/internal/test_util.py b/deps/google/protobuf/internal/test_util.py new file mode 100644 index 00000000..ac88fa81 --- /dev/null +++ b/deps/google/protobuf/internal/test_util.py @@ -0,0 +1,694 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Utilities for Python proto2 tests. + +This is intentionally modeled on C++ code in +//google/protobuf/test_util.*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import os.path + +from google.protobuf import unittest_import_pb2 +from google.protobuf import unittest_pb2 +from google.protobuf import descriptor_pb2 + +# Tests whether the given TestAllTypes message is proto2 or not. +# This is used to gate several fields/features that only exist +# for the proto2 version of the message. +def IsProto2(message): + return message.DESCRIPTOR.syntax == "proto2" + +def SetAllNonLazyFields(message): + """Sets every non-lazy field in the message to a unique value. + + Args: + message: A TestAllTypes instance. + """ + + # + # Optional fields. 
+ # + + message.optional_int32 = 101 + message.optional_int64 = 102 + message.optional_uint32 = 103 + message.optional_uint64 = 104 + message.optional_sint32 = 105 + message.optional_sint64 = 106 + message.optional_fixed32 = 107 + message.optional_fixed64 = 108 + message.optional_sfixed32 = 109 + message.optional_sfixed64 = 110 + message.optional_float = 111 + message.optional_double = 112 + message.optional_bool = True + message.optional_string = u'115' + message.optional_bytes = b'116' + + if IsProto2(message): + message.optionalgroup.a = 117 + message.optional_nested_message.bb = 118 + message.optional_foreign_message.c = 119 + message.optional_import_message.d = 120 + message.optional_public_import_message.e = 126 + + message.optional_nested_enum = unittest_pb2.TestAllTypes.BAZ + message.optional_foreign_enum = unittest_pb2.FOREIGN_BAZ + if IsProto2(message): + message.optional_import_enum = unittest_import_pb2.IMPORT_BAZ + + message.optional_string_piece = u'124' + message.optional_cord = u'125' + + # + # Repeated fields. 
+ # + + message.repeated_int32.append(201) + message.repeated_int64.append(202) + message.repeated_uint32.append(203) + message.repeated_uint64.append(204) + message.repeated_sint32.append(205) + message.repeated_sint64.append(206) + message.repeated_fixed32.append(207) + message.repeated_fixed64.append(208) + message.repeated_sfixed32.append(209) + message.repeated_sfixed64.append(210) + message.repeated_float.append(211) + message.repeated_double.append(212) + message.repeated_bool.append(True) + message.repeated_string.append(u'215') + message.repeated_bytes.append(b'216') + + if IsProto2(message): + message.repeatedgroup.add().a = 217 + message.repeated_nested_message.add().bb = 218 + message.repeated_foreign_message.add().c = 219 + message.repeated_import_message.add().d = 220 + message.repeated_lazy_message.add().bb = 227 + + message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAR) + message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAR) + if IsProto2(message): + message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAR) + + message.repeated_string_piece.append(u'224') + message.repeated_cord.append(u'225') + + # Add a second one of each field. 
+ message.repeated_int32.append(301) + message.repeated_int64.append(302) + message.repeated_uint32.append(303) + message.repeated_uint64.append(304) + message.repeated_sint32.append(305) + message.repeated_sint64.append(306) + message.repeated_fixed32.append(307) + message.repeated_fixed64.append(308) + message.repeated_sfixed32.append(309) + message.repeated_sfixed64.append(310) + message.repeated_float.append(311) + message.repeated_double.append(312) + message.repeated_bool.append(False) + message.repeated_string.append(u'315') + message.repeated_bytes.append(b'316') + + if IsProto2(message): + message.repeatedgroup.add().a = 317 + message.repeated_nested_message.add().bb = 318 + message.repeated_foreign_message.add().c = 319 + message.repeated_import_message.add().d = 320 + message.repeated_lazy_message.add().bb = 327 + + message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAZ) + message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAZ) + if IsProto2(message): + message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAZ) + + message.repeated_string_piece.append(u'324') + message.repeated_cord.append(u'325') + + # + # Fields that have defaults. 
+ # + + if IsProto2(message): + message.default_int32 = 401 + message.default_int64 = 402 + message.default_uint32 = 403 + message.default_uint64 = 404 + message.default_sint32 = 405 + message.default_sint64 = 406 + message.default_fixed32 = 407 + message.default_fixed64 = 408 + message.default_sfixed32 = 409 + message.default_sfixed64 = 410 + message.default_float = 411 + message.default_double = 412 + message.default_bool = False + message.default_string = '415' + message.default_bytes = b'416' + + message.default_nested_enum = unittest_pb2.TestAllTypes.FOO + message.default_foreign_enum = unittest_pb2.FOREIGN_FOO + message.default_import_enum = unittest_import_pb2.IMPORT_FOO + + message.default_string_piece = '424' + message.default_cord = '425' + + message.oneof_uint32 = 601 + message.oneof_nested_message.bb = 602 + message.oneof_string = '603' + message.oneof_bytes = b'604' + + +def SetAllFields(message): + SetAllNonLazyFields(message) + message.optional_lazy_message.bb = 127 + + +def SetAllExtensions(message): + """Sets every extension in the message to a unique value. + + Args: + message: A unittest_pb2.TestAllExtensions instance. + """ + + extensions = message.Extensions + pb2 = unittest_pb2 + import_pb2 = unittest_import_pb2 + + # + # Optional fields. 
+ # + + extensions[pb2.optional_int32_extension] = 101 + extensions[pb2.optional_int64_extension] = 102 + extensions[pb2.optional_uint32_extension] = 103 + extensions[pb2.optional_uint64_extension] = 104 + extensions[pb2.optional_sint32_extension] = 105 + extensions[pb2.optional_sint64_extension] = 106 + extensions[pb2.optional_fixed32_extension] = 107 + extensions[pb2.optional_fixed64_extension] = 108 + extensions[pb2.optional_sfixed32_extension] = 109 + extensions[pb2.optional_sfixed64_extension] = 110 + extensions[pb2.optional_float_extension] = 111 + extensions[pb2.optional_double_extension] = 112 + extensions[pb2.optional_bool_extension] = True + extensions[pb2.optional_string_extension] = u'115' + extensions[pb2.optional_bytes_extension] = b'116' + + extensions[pb2.optionalgroup_extension].a = 117 + extensions[pb2.optional_nested_message_extension].bb = 118 + extensions[pb2.optional_foreign_message_extension].c = 119 + extensions[pb2.optional_import_message_extension].d = 120 + extensions[pb2.optional_public_import_message_extension].e = 126 + extensions[pb2.optional_lazy_message_extension].bb = 127 + + extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ + extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ + extensions[pb2.optional_foreign_enum_extension] = pb2.FOREIGN_BAZ + extensions[pb2.optional_import_enum_extension] = import_pb2.IMPORT_BAZ + + extensions[pb2.optional_string_piece_extension] = u'124' + extensions[pb2.optional_cord_extension] = u'125' + + # + # Repeated fields. 
+ # + + extensions[pb2.repeated_int32_extension].append(201) + extensions[pb2.repeated_int64_extension].append(202) + extensions[pb2.repeated_uint32_extension].append(203) + extensions[pb2.repeated_uint64_extension].append(204) + extensions[pb2.repeated_sint32_extension].append(205) + extensions[pb2.repeated_sint64_extension].append(206) + extensions[pb2.repeated_fixed32_extension].append(207) + extensions[pb2.repeated_fixed64_extension].append(208) + extensions[pb2.repeated_sfixed32_extension].append(209) + extensions[pb2.repeated_sfixed64_extension].append(210) + extensions[pb2.repeated_float_extension].append(211) + extensions[pb2.repeated_double_extension].append(212) + extensions[pb2.repeated_bool_extension].append(True) + extensions[pb2.repeated_string_extension].append(u'215') + extensions[pb2.repeated_bytes_extension].append(b'216') + + extensions[pb2.repeatedgroup_extension].add().a = 217 + extensions[pb2.repeated_nested_message_extension].add().bb = 218 + extensions[pb2.repeated_foreign_message_extension].add().c = 219 + extensions[pb2.repeated_import_message_extension].add().d = 220 + extensions[pb2.repeated_lazy_message_extension].add().bb = 227 + + extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAR) + extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAR) + extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAR) + + extensions[pb2.repeated_string_piece_extension].append(u'224') + extensions[pb2.repeated_cord_extension].append(u'225') + + # Append a second one of each field. 
+ extensions[pb2.repeated_int32_extension].append(301) + extensions[pb2.repeated_int64_extension].append(302) + extensions[pb2.repeated_uint32_extension].append(303) + extensions[pb2.repeated_uint64_extension].append(304) + extensions[pb2.repeated_sint32_extension].append(305) + extensions[pb2.repeated_sint64_extension].append(306) + extensions[pb2.repeated_fixed32_extension].append(307) + extensions[pb2.repeated_fixed64_extension].append(308) + extensions[pb2.repeated_sfixed32_extension].append(309) + extensions[pb2.repeated_sfixed64_extension].append(310) + extensions[pb2.repeated_float_extension].append(311) + extensions[pb2.repeated_double_extension].append(312) + extensions[pb2.repeated_bool_extension].append(False) + extensions[pb2.repeated_string_extension].append(u'315') + extensions[pb2.repeated_bytes_extension].append(b'316') + + extensions[pb2.repeatedgroup_extension].add().a = 317 + extensions[pb2.repeated_nested_message_extension].add().bb = 318 + extensions[pb2.repeated_foreign_message_extension].add().c = 319 + extensions[pb2.repeated_import_message_extension].add().d = 320 + extensions[pb2.repeated_lazy_message_extension].add().bb = 327 + + extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAZ) + extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAZ) + extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAZ) + + extensions[pb2.repeated_string_piece_extension].append(u'324') + extensions[pb2.repeated_cord_extension].append(u'325') + + # + # Fields with defaults. 
  #

  extensions[pb2.default_int32_extension] = 401
  extensions[pb2.default_int64_extension] = 402
  extensions[pb2.default_uint32_extension] = 403
  extensions[pb2.default_uint64_extension] = 404
  extensions[pb2.default_sint32_extension] = 405
  extensions[pb2.default_sint64_extension] = 406
  extensions[pb2.default_fixed32_extension] = 407
  extensions[pb2.default_fixed64_extension] = 408
  extensions[pb2.default_sfixed32_extension] = 409
  extensions[pb2.default_sfixed64_extension] = 410
  extensions[pb2.default_float_extension] = 411
  extensions[pb2.default_double_extension] = 412
  extensions[pb2.default_bool_extension] = False
  extensions[pb2.default_string_extension] = u'415'
  extensions[pb2.default_bytes_extension] = b'416'

  extensions[pb2.default_nested_enum_extension] = pb2.TestAllTypes.FOO
  extensions[pb2.default_foreign_enum_extension] = pb2.FOREIGN_FOO
  extensions[pb2.default_import_enum_extension] = import_pb2.IMPORT_FOO

  extensions[pb2.default_string_piece_extension] = u'424'
  extensions[pb2.default_cord_extension] = '425'

  # Oneof members (each assignment below clears the previous member, so only
  # oneof_bytes remains set afterwards).
  extensions[pb2.oneof_uint32_extension] = 601
  extensions[pb2.oneof_nested_message_extension].bb = 602
  extensions[pb2.oneof_string_extension] = u'603'
  extensions[pb2.oneof_bytes_extension] = b'604'


def SetAllFieldsAndExtensions(message):
  """Sets every field and extension in the message to a unique value.

  Args:
    message: A unittest_pb2.TestAllExtensions message.
  """
  message.my_int = 1
  message.my_string = 'foo'
  message.my_float = 1.0
  message.Extensions[unittest_pb2.my_extension_int] = 23
  message.Extensions[unittest_pb2.my_extension_string] = 'bar'


def ExpectAllFieldsAndExtensionsInOrder(serialized):
  """Ensures that serialized is the serialization we expect for a message
  filled with SetAllFieldsAndExtensions().  (Specifically, ensures that the
  serialization is in canonical, tag-number order).

  Raises:
    ValueError: if serialized differs from the expected canonical encoding.
  """
  my_extension_int = unittest_pb2.my_extension_int
  my_extension_string = unittest_pb2.my_extension_string
  expected_strings = []
  # Serialize one field at a time in tag-number order and concatenate; the
  # result is the canonical ordering that serialized must match exactly.
  message = unittest_pb2.TestFieldOrderings()
  message.my_int = 1  # Field 1.
  expected_strings.append(message.SerializeToString())
  message.Clear()
  message.Extensions[my_extension_int] = 23  # Field 5.
  expected_strings.append(message.SerializeToString())
  message.Clear()
  message.my_string = 'foo'  # Field 11.
  expected_strings.append(message.SerializeToString())
  message.Clear()
  message.Extensions[my_extension_string] = 'bar'  # Field 50.
  expected_strings.append(message.SerializeToString())
  message.Clear()
  message.my_float = 1.0
  expected_strings.append(message.SerializeToString())
  message.Clear()
  expected = b''.join(expected_strings)

  if expected != serialized:
    raise ValueError('Expected %r, found %r' % (expected, serialized))


def ExpectAllFieldsSet(test_case, message):
  """Checks that all fields have the correct values after Set*Fields() is called."""
  test_case.assertTrue(message.HasField('optional_int32'))
  test_case.assertTrue(message.HasField('optional_int64'))
  test_case.assertTrue(message.HasField('optional_uint32'))
  test_case.assertTrue(message.HasField('optional_uint64'))
  test_case.assertTrue(message.HasField('optional_sint32'))
  test_case.assertTrue(message.HasField('optional_sint64'))
  test_case.assertTrue(message.HasField('optional_fixed32'))
  test_case.assertTrue(message.HasField('optional_fixed64'))
  test_case.assertTrue(message.HasField('optional_sfixed32'))
  test_case.assertTrue(message.HasField('optional_sfixed64'))
  test_case.assertTrue(message.HasField('optional_float'))
  test_case.assertTrue(message.HasField('optional_double'))
  test_case.assertTrue(message.HasField('optional_bool'))
  test_case.assertTrue(message.HasField('optional_string'))
  test_case.assertTrue(message.HasField('optional_bytes'))

  if IsProto2(message):
test_case.assertTrue(message.HasField('optionalgroup')) + test_case.assertTrue(message.HasField('optional_nested_message')) + test_case.assertTrue(message.HasField('optional_foreign_message')) + test_case.assertTrue(message.HasField('optional_import_message')) + + test_case.assertTrue(message.optionalgroup.HasField('a')) + test_case.assertTrue(message.optional_nested_message.HasField('bb')) + test_case.assertTrue(message.optional_foreign_message.HasField('c')) + test_case.assertTrue(message.optional_import_message.HasField('d')) + + test_case.assertTrue(message.HasField('optional_nested_enum')) + test_case.assertTrue(message.HasField('optional_foreign_enum')) + if IsProto2(message): + test_case.assertTrue(message.HasField('optional_import_enum')) + + test_case.assertTrue(message.HasField('optional_string_piece')) + test_case.assertTrue(message.HasField('optional_cord')) + + test_case.assertEqual(101, message.optional_int32) + test_case.assertEqual(102, message.optional_int64) + test_case.assertEqual(103, message.optional_uint32) + test_case.assertEqual(104, message.optional_uint64) + test_case.assertEqual(105, message.optional_sint32) + test_case.assertEqual(106, message.optional_sint64) + test_case.assertEqual(107, message.optional_fixed32) + test_case.assertEqual(108, message.optional_fixed64) + test_case.assertEqual(109, message.optional_sfixed32) + test_case.assertEqual(110, message.optional_sfixed64) + test_case.assertEqual(111, message.optional_float) + test_case.assertEqual(112, message.optional_double) + test_case.assertEqual(True, message.optional_bool) + test_case.assertEqual('115', message.optional_string) + test_case.assertEqual(b'116', message.optional_bytes) + + if IsProto2(message): + test_case.assertEqual(117, message.optionalgroup.a) + test_case.assertEqual(118, message.optional_nested_message.bb) + test_case.assertEqual(119, message.optional_foreign_message.c) + test_case.assertEqual(120, message.optional_import_message.d) + 
test_case.assertEqual(126, message.optional_public_import_message.e) + test_case.assertEqual(127, message.optional_lazy_message.bb) + + test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ, + message.optional_nested_enum) + test_case.assertEqual(unittest_pb2.FOREIGN_BAZ, + message.optional_foreign_enum) + if IsProto2(message): + test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ, + message.optional_import_enum) + + # ----------------------------------------------------------------- + + test_case.assertEqual(2, len(message.repeated_int32)) + test_case.assertEqual(2, len(message.repeated_int64)) + test_case.assertEqual(2, len(message.repeated_uint32)) + test_case.assertEqual(2, len(message.repeated_uint64)) + test_case.assertEqual(2, len(message.repeated_sint32)) + test_case.assertEqual(2, len(message.repeated_sint64)) + test_case.assertEqual(2, len(message.repeated_fixed32)) + test_case.assertEqual(2, len(message.repeated_fixed64)) + test_case.assertEqual(2, len(message.repeated_sfixed32)) + test_case.assertEqual(2, len(message.repeated_sfixed64)) + test_case.assertEqual(2, len(message.repeated_float)) + test_case.assertEqual(2, len(message.repeated_double)) + test_case.assertEqual(2, len(message.repeated_bool)) + test_case.assertEqual(2, len(message.repeated_string)) + test_case.assertEqual(2, len(message.repeated_bytes)) + + if IsProto2(message): + test_case.assertEqual(2, len(message.repeatedgroup)) + test_case.assertEqual(2, len(message.repeated_nested_message)) + test_case.assertEqual(2, len(message.repeated_foreign_message)) + test_case.assertEqual(2, len(message.repeated_import_message)) + test_case.assertEqual(2, len(message.repeated_nested_enum)) + test_case.assertEqual(2, len(message.repeated_foreign_enum)) + if IsProto2(message): + test_case.assertEqual(2, len(message.repeated_import_enum)) + + test_case.assertEqual(2, len(message.repeated_string_piece)) + test_case.assertEqual(2, len(message.repeated_cord)) + + test_case.assertEqual(201, 
message.repeated_int32[0]) + test_case.assertEqual(202, message.repeated_int64[0]) + test_case.assertEqual(203, message.repeated_uint32[0]) + test_case.assertEqual(204, message.repeated_uint64[0]) + test_case.assertEqual(205, message.repeated_sint32[0]) + test_case.assertEqual(206, message.repeated_sint64[0]) + test_case.assertEqual(207, message.repeated_fixed32[0]) + test_case.assertEqual(208, message.repeated_fixed64[0]) + test_case.assertEqual(209, message.repeated_sfixed32[0]) + test_case.assertEqual(210, message.repeated_sfixed64[0]) + test_case.assertEqual(211, message.repeated_float[0]) + test_case.assertEqual(212, message.repeated_double[0]) + test_case.assertEqual(True, message.repeated_bool[0]) + test_case.assertEqual('215', message.repeated_string[0]) + test_case.assertEqual(b'216', message.repeated_bytes[0]) + + if IsProto2(message): + test_case.assertEqual(217, message.repeatedgroup[0].a) + test_case.assertEqual(218, message.repeated_nested_message[0].bb) + test_case.assertEqual(219, message.repeated_foreign_message[0].c) + test_case.assertEqual(220, message.repeated_import_message[0].d) + test_case.assertEqual(227, message.repeated_lazy_message[0].bb) + + test_case.assertEqual(unittest_pb2.TestAllTypes.BAR, + message.repeated_nested_enum[0]) + test_case.assertEqual(unittest_pb2.FOREIGN_BAR, + message.repeated_foreign_enum[0]) + if IsProto2(message): + test_case.assertEqual(unittest_import_pb2.IMPORT_BAR, + message.repeated_import_enum[0]) + + test_case.assertEqual(301, message.repeated_int32[1]) + test_case.assertEqual(302, message.repeated_int64[1]) + test_case.assertEqual(303, message.repeated_uint32[1]) + test_case.assertEqual(304, message.repeated_uint64[1]) + test_case.assertEqual(305, message.repeated_sint32[1]) + test_case.assertEqual(306, message.repeated_sint64[1]) + test_case.assertEqual(307, message.repeated_fixed32[1]) + test_case.assertEqual(308, message.repeated_fixed64[1]) + test_case.assertEqual(309, message.repeated_sfixed32[1]) + 
test_case.assertEqual(310, message.repeated_sfixed64[1]) + test_case.assertEqual(311, message.repeated_float[1]) + test_case.assertEqual(312, message.repeated_double[1]) + test_case.assertEqual(False, message.repeated_bool[1]) + test_case.assertEqual('315', message.repeated_string[1]) + test_case.assertEqual(b'316', message.repeated_bytes[1]) + + if IsProto2(message): + test_case.assertEqual(317, message.repeatedgroup[1].a) + test_case.assertEqual(318, message.repeated_nested_message[1].bb) + test_case.assertEqual(319, message.repeated_foreign_message[1].c) + test_case.assertEqual(320, message.repeated_import_message[1].d) + test_case.assertEqual(327, message.repeated_lazy_message[1].bb) + + test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ, + message.repeated_nested_enum[1]) + test_case.assertEqual(unittest_pb2.FOREIGN_BAZ, + message.repeated_foreign_enum[1]) + if IsProto2(message): + test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ, + message.repeated_import_enum[1]) + + # ----------------------------------------------------------------- + + if IsProto2(message): + test_case.assertTrue(message.HasField('default_int32')) + test_case.assertTrue(message.HasField('default_int64')) + test_case.assertTrue(message.HasField('default_uint32')) + test_case.assertTrue(message.HasField('default_uint64')) + test_case.assertTrue(message.HasField('default_sint32')) + test_case.assertTrue(message.HasField('default_sint64')) + test_case.assertTrue(message.HasField('default_fixed32')) + test_case.assertTrue(message.HasField('default_fixed64')) + test_case.assertTrue(message.HasField('default_sfixed32')) + test_case.assertTrue(message.HasField('default_sfixed64')) + test_case.assertTrue(message.HasField('default_float')) + test_case.assertTrue(message.HasField('default_double')) + test_case.assertTrue(message.HasField('default_bool')) + test_case.assertTrue(message.HasField('default_string')) + test_case.assertTrue(message.HasField('default_bytes')) + + 
test_case.assertTrue(message.HasField('default_nested_enum')) + test_case.assertTrue(message.HasField('default_foreign_enum')) + test_case.assertTrue(message.HasField('default_import_enum')) + + test_case.assertEqual(401, message.default_int32) + test_case.assertEqual(402, message.default_int64) + test_case.assertEqual(403, message.default_uint32) + test_case.assertEqual(404, message.default_uint64) + test_case.assertEqual(405, message.default_sint32) + test_case.assertEqual(406, message.default_sint64) + test_case.assertEqual(407, message.default_fixed32) + test_case.assertEqual(408, message.default_fixed64) + test_case.assertEqual(409, message.default_sfixed32) + test_case.assertEqual(410, message.default_sfixed64) + test_case.assertEqual(411, message.default_float) + test_case.assertEqual(412, message.default_double) + test_case.assertEqual(False, message.default_bool) + test_case.assertEqual('415', message.default_string) + test_case.assertEqual(b'416', message.default_bytes) + + test_case.assertEqual(unittest_pb2.TestAllTypes.FOO, + message.default_nested_enum) + test_case.assertEqual(unittest_pb2.FOREIGN_FOO, + message.default_foreign_enum) + test_case.assertEqual(unittest_import_pb2.IMPORT_FOO, + message.default_import_enum) + + +def GoldenFile(filename): + """Finds the given golden file and returns a file object representing it.""" + + # Search up the directory tree looking for the C++ protobuf source code. + path = '.' + while os.path.exists(path): + if os.path.exists(os.path.join(path, 'src/google/protobuf')): + # Found it. Load the golden file from the testdata directory. + full_path = os.path.join(path, 'src/google/protobuf/testdata', filename) + return open(full_path, 'rb') + path = os.path.join(path, '..') + + # Search internally. + path = '.' + full_path = os.path.join(path, 'third_party/py/google/protobuf/testdata', + filename) + if os.path.exists(full_path): + # Found it. Load the golden file from the testdata directory. 
+ return open(full_path, 'rb') + + raise RuntimeError( + 'Could not find golden files. This test must be run from within the ' + 'protobuf source package so that it can read test data files from the ' + 'C++ source tree.') + + +def GoldenFileData(filename): + """Finds the given golden file and returns its contents.""" + with GoldenFile(filename) as f: + return f.read() + + +def SetAllPackedFields(message): + """Sets every field in the message to a unique value. + + Args: + message: A TestPackedTypes instance. + """ + message.packed_int32.extend([601, 701]) + message.packed_int64.extend([602, 702]) + message.packed_uint32.extend([603, 703]) + message.packed_uint64.extend([604, 704]) + message.packed_sint32.extend([605, 705]) + message.packed_sint64.extend([606, 706]) + message.packed_fixed32.extend([607, 707]) + message.packed_fixed64.extend([608, 708]) + message.packed_sfixed32.extend([609, 709]) + message.packed_sfixed64.extend([610, 710]) + message.packed_float.extend([611.0, 711.0]) + message.packed_double.extend([612.0, 712.0]) + message.packed_bool.extend([True, False]) + message.packed_enum.extend([unittest_pb2.FOREIGN_BAR, + unittest_pb2.FOREIGN_BAZ]) + + +def SetAllPackedExtensions(message): + """Sets every extension in the message to a unique value. + + Args: + message: A unittest_pb2.TestPackedExtensions instance. 
+ """ + extensions = message.Extensions + pb2 = unittest_pb2 + + extensions[pb2.packed_int32_extension].extend([601, 701]) + extensions[pb2.packed_int64_extension].extend([602, 702]) + extensions[pb2.packed_uint32_extension].extend([603, 703]) + extensions[pb2.packed_uint64_extension].extend([604, 704]) + extensions[pb2.packed_sint32_extension].extend([605, 705]) + extensions[pb2.packed_sint64_extension].extend([606, 706]) + extensions[pb2.packed_fixed32_extension].extend([607, 707]) + extensions[pb2.packed_fixed64_extension].extend([608, 708]) + extensions[pb2.packed_sfixed32_extension].extend([609, 709]) + extensions[pb2.packed_sfixed64_extension].extend([610, 710]) + extensions[pb2.packed_float_extension].extend([611.0, 711.0]) + extensions[pb2.packed_double_extension].extend([612.0, 712.0]) + extensions[pb2.packed_bool_extension].extend([True, False]) + extensions[pb2.packed_enum_extension].extend([unittest_pb2.FOREIGN_BAR, + unittest_pb2.FOREIGN_BAZ]) + + +def SetAllUnpackedFields(message): + """Sets every field in the message to a unique value. + + Args: + message: A unittest_pb2.TestUnpackedTypes instance. 
+ """ + message.unpacked_int32.extend([601, 701]) + message.unpacked_int64.extend([602, 702]) + message.unpacked_uint32.extend([603, 703]) + message.unpacked_uint64.extend([604, 704]) + message.unpacked_sint32.extend([605, 705]) + message.unpacked_sint64.extend([606, 706]) + message.unpacked_fixed32.extend([607, 707]) + message.unpacked_fixed64.extend([608, 708]) + message.unpacked_sfixed32.extend([609, 709]) + message.unpacked_sfixed64.extend([610, 710]) + message.unpacked_float.extend([611.0, 711.0]) + message.unpacked_double.extend([612.0, 712.0]) + message.unpacked_bool.extend([True, False]) + message.unpacked_enum.extend([unittest_pb2.FOREIGN_BAR, + unittest_pb2.FOREIGN_BAZ]) diff --git a/deps/google/protobuf/internal/text_encoding_test.py b/deps/google/protobuf/internal/text_encoding_test.py new file mode 100644 index 00000000..338a287b --- /dev/null +++ b/deps/google/protobuf/internal/text_encoding_test.py @@ -0,0 +1,71 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Tests for google.protobuf.text_encoding.""" + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf import text_encoding + +TEST_VALUES = [ + ("foo\\rbar\\nbaz\\t", + "foo\\rbar\\nbaz\\t", + b"foo\rbar\nbaz\t"), + ("\\'full of \\\"sound\\\" and \\\"fury\\\"\\'", + "\\'full of \\\"sound\\\" and \\\"fury\\\"\\'", + b"'full of \"sound\" and \"fury\"'"), + ("signi\\\\fying\\\\ nothing\\\\", + "signi\\\\fying\\\\ nothing\\\\", + b"signi\\fying\\ nothing\\"), + ("\\010\\t\\n\\013\\014\\r", + "\x08\\t\\n\x0b\x0c\\r", + b"\010\011\012\013\014\015")] + + +class TextEncodingTestCase(unittest.TestCase): + def testCEscape(self): + for escaped, escaped_utf8, unescaped in TEST_VALUES: + self.assertEqual(escaped, + text_encoding.CEscape(unescaped, as_utf8=False)) + self.assertEqual(escaped_utf8, + text_encoding.CEscape(unescaped, as_utf8=True)) + + def testCUnescape(self): + for escaped, escaped_utf8, unescaped in TEST_VALUES: + self.assertEqual(unescaped, text_encoding.CUnescape(escaped)) + self.assertEqual(unescaped, text_encoding.CUnescape(escaped_utf8)) + + +if __name__ == "__main__": + unittest.main() diff --git 
a/deps/google/protobuf/internal/text_format_test.py b/deps/google/protobuf/internal/text_format_test.py new file mode 100644 index 00000000..0e14556c --- /dev/null +++ b/deps/google/protobuf/internal/text_format_test.py @@ -0,0 +1,1039 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Test for google.protobuf.text_format.""" + +__author__ = 'kenton@google.com (Kenton Varda)' + + +import re +import six +import string + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf.internal import _parameterized + +from google.protobuf import map_unittest_pb2 +from google.protobuf import unittest_mset_pb2 +from google.protobuf import unittest_pb2 +from google.protobuf import unittest_proto3_arena_pb2 +from google.protobuf.internal import api_implementation +from google.protobuf.internal import test_util +from google.protobuf.internal import message_set_extensions_pb2 +from google.protobuf import text_format + + +# Low-level nuts-n-bolts tests. +class SimpleTextFormatTests(unittest.TestCase): + + # The members of _QUOTES are formatted into a regexp template that + # expects single characters. Therefore it's an error (in addition to being + # non-sensical in the first place) to try to specify a "quote mark" that is + # more than one character. + def TestQuoteMarksAreSingleChars(self): + for quote in text_format._QUOTES: + self.assertEqual(1, len(quote)) + + +# Base class with some common functionality. +class TextFormatBase(unittest.TestCase): + + def ReadGolden(self, golden_filename): + with test_util.GoldenFile(golden_filename) as f: + return (f.readlines() if str is bytes else # PY3 + [golden_line.decode('utf-8') for golden_line in f]) + + def CompareToGoldenFile(self, text, golden_filename): + golden_lines = self.ReadGolden(golden_filename) + self.assertMultiLineEqual(text, ''.join(golden_lines)) + + def CompareToGoldenText(self, text, golden_text): + self.assertEqual(text, golden_text) + + def RemoveRedundantZeros(self, text): + # Some platforms print 1e+5 as 1e+005. This is fine, but we need to remove + # these zeros in order to match the golden file. 
+    text = text.replace('e+0','e+').replace('e+0','e+') \
+               .replace('e-0','e-').replace('e-0','e-')
+    # Floating point fields are printed with .0 suffix even if they are
+    # actually integer numbers.
+    text = re.compile('\.0$', re.MULTILINE).sub('', text)
+    return text
+
+
+@_parameterized.Parameters(
+    (unittest_pb2),
+    (unittest_proto3_arena_pb2))
+class TextFormatTest(TextFormatBase):
+
+  def testPrintExotic(self, message_module):
+    message = message_module.TestAllTypes()
+    message.repeated_int64.append(-9223372036854775808)
+    message.repeated_uint64.append(18446744073709551615)
+    message.repeated_double.append(123.456)
+    message.repeated_double.append(1.23e22)
+    message.repeated_double.append(1.23e-18)
+    message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
+    message.repeated_string.append(u'\u00fc\ua71f')
+    self.CompareToGoldenText(
+        self.RemoveRedundantZeros(text_format.MessageToString(message)),
+        'repeated_int64: -9223372036854775808\n'
+        'repeated_uint64: 18446744073709551615\n'
+        'repeated_double: 123.456\n'
+        'repeated_double: 1.23e+22\n'
+        'repeated_double: 1.23e-18\n'
+        'repeated_string:'
+        ' "\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n'
+        'repeated_string: "\\303\\274\\352\\234\\237"\n')
+
+  def testPrintExoticUnicodeSubclass(self, message_module):
+    class UnicodeSub(six.text_type):
+      pass
+    message = message_module.TestAllTypes()
+    message.repeated_string.append(UnicodeSub(u'\u00fc\ua71f'))
+    self.CompareToGoldenText(
+        text_format.MessageToString(message),
+        'repeated_string: "\\303\\274\\352\\234\\237"\n')
+
+  def testPrintNestedMessageAsOneLine(self, message_module):
+    message = message_module.TestAllTypes()
+    msg = message.repeated_nested_message.add()
+    msg.bb = 42
+    self.CompareToGoldenText(
+        text_format.MessageToString(message, as_one_line=True),
+        'repeated_nested_message { bb: 42 }')
+
+  def testPrintRepeatedFieldsAsOneLine(self, message_module):
+    message = message_module.TestAllTypes()
+    message.repeated_int32.append(1)
+
message.repeated_int32.append(1) + message.repeated_int32.append(3) + message.repeated_string.append('Google') + message.repeated_string.append('Zurich') + self.CompareToGoldenText( + text_format.MessageToString(message, as_one_line=True), + 'repeated_int32: 1 repeated_int32: 1 repeated_int32: 3 ' + 'repeated_string: "Google" repeated_string: "Zurich"') + + def testPrintNestedNewLineInStringAsOneLine(self, message_module): + message = message_module.TestAllTypes() + message.optional_string = 'a\nnew\nline' + self.CompareToGoldenText( + text_format.MessageToString(message, as_one_line=True), + 'optional_string: "a\\nnew\\nline"') + + def testPrintExoticAsOneLine(self, message_module): + message = message_module.TestAllTypes() + message.repeated_int64.append(-9223372036854775808) + message.repeated_uint64.append(18446744073709551615) + message.repeated_double.append(123.456) + message.repeated_double.append(1.23e22) + message.repeated_double.append(1.23e-18) + message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"') + message.repeated_string.append(u'\u00fc\ua71f') + self.CompareToGoldenText( + self.RemoveRedundantZeros( + text_format.MessageToString(message, as_one_line=True)), + 'repeated_int64: -9223372036854775808' + ' repeated_uint64: 18446744073709551615' + ' repeated_double: 123.456' + ' repeated_double: 1.23e+22' + ' repeated_double: 1.23e-18' + ' repeated_string: ' + '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""' + ' repeated_string: "\\303\\274\\352\\234\\237"') + + def testRoundTripExoticAsOneLine(self, message_module): + message = message_module.TestAllTypes() + message.repeated_int64.append(-9223372036854775808) + message.repeated_uint64.append(18446744073709551615) + message.repeated_double.append(123.456) + message.repeated_double.append(1.23e22) + message.repeated_double.append(1.23e-18) + message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"') + message.repeated_string.append(u'\u00fc\ua71f') + + # Test as_utf8 = False. 
+ wire_text = text_format.MessageToString( + message, as_one_line=True, as_utf8=False) + parsed_message = message_module.TestAllTypes() + r = text_format.Parse(wire_text, parsed_message) + self.assertIs(r, parsed_message) + self.assertEqual(message, parsed_message) + + # Test as_utf8 = True. + wire_text = text_format.MessageToString( + message, as_one_line=True, as_utf8=True) + parsed_message = message_module.TestAllTypes() + r = text_format.Parse(wire_text, parsed_message) + self.assertIs(r, parsed_message) + self.assertEqual(message, parsed_message, + '\n%s != %s' % (message, parsed_message)) + + def testPrintRawUtf8String(self, message_module): + message = message_module.TestAllTypes() + message.repeated_string.append(u'\u00fc\ua71f') + text = text_format.MessageToString(message, as_utf8=True) + self.CompareToGoldenText(text, 'repeated_string: "\303\274\352\234\237"\n') + parsed_message = message_module.TestAllTypes() + text_format.Parse(text, parsed_message) + self.assertEqual(message, parsed_message, + '\n%s != %s' % (message, parsed_message)) + + def testPrintFloatFormat(self, message_module): + # Check that float_format argument is passed to sub-message formatting. + message = message_module.NestedTestAllTypes() + # We use 1.25 as it is a round number in binary. The proto 32-bit float + # will not gain additional imprecise digits as a 64-bit Python float and + # show up in its str. 32-bit 1.2 is noisy when extended to 64-bit: + # >>> struct.unpack('f', struct.pack('f', 1.2))[0] + # 1.2000000476837158 + # >>> struct.unpack('f', struct.pack('f', 1.25))[0] + # 1.25 + message.payload.optional_float = 1.25 + # Check rounding at 15 significant digits + message.payload.optional_double = -.000003456789012345678 + # Check no decimal point. + message.payload.repeated_float.append(-5642) + # Check no trailing zeros. 
+ message.payload.repeated_double.append(.000078900) + formatted_fields = ['optional_float: 1.25', + 'optional_double: -3.45678901234568e-6', + 'repeated_float: -5642', + 'repeated_double: 7.89e-5'] + text_message = text_format.MessageToString(message, float_format='.15g') + self.CompareToGoldenText( + self.RemoveRedundantZeros(text_message), + 'payload {{\n {0}\n {1}\n {2}\n {3}\n}}\n'.format(*formatted_fields)) + # as_one_line=True is a separate code branch where float_format is passed. + text_message = text_format.MessageToString(message, as_one_line=True, + float_format='.15g') + self.CompareToGoldenText( + self.RemoveRedundantZeros(text_message), + 'payload {{ {0} {1} {2} {3} }}'.format(*formatted_fields)) + + def testMessageToString(self, message_module): + message = message_module.ForeignMessage() + message.c = 123 + self.assertEqual('c: 123\n', str(message)) + + def testParseAllFields(self, message_module): + message = message_module.TestAllTypes() + test_util.SetAllFields(message) + ascii_text = text_format.MessageToString(message) + + parsed_message = message_module.TestAllTypes() + text_format.Parse(ascii_text, parsed_message) + self.assertEqual(message, parsed_message) + if message_module is unittest_pb2: + test_util.ExpectAllFieldsSet(self, message) + + def testParseExotic(self, message_module): + message = message_module.TestAllTypes() + text = ('repeated_int64: -9223372036854775808\n' + 'repeated_uint64: 18446744073709551615\n' + 'repeated_double: 123.456\n' + 'repeated_double: 1.23e+22\n' + 'repeated_double: 1.23e-18\n' + 'repeated_string: \n' + '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n' + 'repeated_string: "foo" \'corge\' "grault"\n' + 'repeated_string: "\\303\\274\\352\\234\\237"\n' + 'repeated_string: "\\xc3\\xbc"\n' + 'repeated_string: "\xc3\xbc"\n') + text_format.Parse(text, message) + + self.assertEqual(-9223372036854775808, message.repeated_int64[0]) + self.assertEqual(18446744073709551615, message.repeated_uint64[0]) + 
self.assertEqual(123.456, message.repeated_double[0]) + self.assertEqual(1.23e22, message.repeated_double[1]) + self.assertEqual(1.23e-18, message.repeated_double[2]) + self.assertEqual( + '\000\001\a\b\f\n\r\t\v\\\'"', message.repeated_string[0]) + self.assertEqual('foocorgegrault', message.repeated_string[1]) + self.assertEqual(u'\u00fc\ua71f', message.repeated_string[2]) + self.assertEqual(u'\u00fc', message.repeated_string[3]) + + def testParseTrailingCommas(self, message_module): + message = message_module.TestAllTypes() + text = ('repeated_int64: 100;\n' + 'repeated_int64: 200;\n' + 'repeated_int64: 300,\n' + 'repeated_string: "one",\n' + 'repeated_string: "two";\n') + text_format.Parse(text, message) + + self.assertEqual(100, message.repeated_int64[0]) + self.assertEqual(200, message.repeated_int64[1]) + self.assertEqual(300, message.repeated_int64[2]) + self.assertEqual(u'one', message.repeated_string[0]) + self.assertEqual(u'two', message.repeated_string[1]) + + def testParseRepeatedScalarShortFormat(self, message_module): + message = message_module.TestAllTypes() + text = ('repeated_int64: [100, 200];\n' + 'repeated_int64: 300,\n' + 'repeated_string: ["one", "two"];\n') + text_format.Parse(text, message) + + self.assertEqual(100, message.repeated_int64[0]) + self.assertEqual(200, message.repeated_int64[1]) + self.assertEqual(300, message.repeated_int64[2]) + self.assertEqual(u'one', message.repeated_string[0]) + self.assertEqual(u'two', message.repeated_string[1]) + + def testParseEmptyText(self, message_module): + message = message_module.TestAllTypes() + text = '' + text_format.Parse(text, message) + self.assertEqual(message_module.TestAllTypes(), message) + + def testParseInvalidUtf8(self, message_module): + message = message_module.TestAllTypes() + text = 'repeated_string: "\\xc3\\xc3"' + self.assertRaises(text_format.ParseError, text_format.Parse, text, message) + + def testParseSingleWord(self, message_module): + message = 
message_module.TestAllTypes() + text = 'foo' + six.assertRaisesRegex(self, + text_format.ParseError, + (r'1:1 : Message type "\w+.TestAllTypes" has no field named ' + r'"foo".'), + text_format.Parse, text, message) + + def testParseUnknownField(self, message_module): + message = message_module.TestAllTypes() + text = 'unknown_field: 8\n' + six.assertRaisesRegex(self, + text_format.ParseError, + (r'1:1 : Message type "\w+.TestAllTypes" has no field named ' + r'"unknown_field".'), + text_format.Parse, text, message) + + def testParseBadEnumValue(self, message_module): + message = message_module.TestAllTypes() + text = 'optional_nested_enum: BARR' + six.assertRaisesRegex(self, + text_format.ParseError, + (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" ' + r'has no value named BARR.'), + text_format.Parse, text, message) + + message = message_module.TestAllTypes() + text = 'optional_nested_enum: 100' + six.assertRaisesRegex(self, + text_format.ParseError, + (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" ' + r'has no value with number 100.'), + text_format.Parse, text, message) + + def testParseBadIntValue(self, message_module): + message = message_module.TestAllTypes() + text = 'optional_int32: bork' + six.assertRaisesRegex(self, + text_format.ParseError, + ('1:17 : Couldn\'t parse integer: bork'), + text_format.Parse, text, message) + + def testParseStringFieldUnescape(self, message_module): + message = message_module.TestAllTypes() + text = r'''repeated_string: "\xf\x62" + repeated_string: "\\xf\\x62" + repeated_string: "\\\xf\\\x62" + repeated_string: "\\\\xf\\\\x62" + repeated_string: "\\\\\xf\\\\\x62" + repeated_string: "\x5cx20"''' + text_format.Parse(text, message) + + SLASH = '\\' + self.assertEqual('\x0fb', message.repeated_string[0]) + self.assertEqual(SLASH + 'xf' + SLASH + 'x62', message.repeated_string[1]) + self.assertEqual(SLASH + '\x0f' + SLASH + 'b', message.repeated_string[2]) + self.assertEqual(SLASH + SLASH + 'xf' + SLASH + SLASH + 'x62', + 
message.repeated_string[3]) + self.assertEqual(SLASH + SLASH + '\x0f' + SLASH + SLASH + 'b', + message.repeated_string[4]) + self.assertEqual(SLASH + 'x20', message.repeated_string[5]) + + def testMergeDuplicateScalars(self, message_module): + message = message_module.TestAllTypes() + text = ('optional_int32: 42 ' + 'optional_int32: 67') + r = text_format.Merge(text, message) + self.assertIs(r, message) + self.assertEqual(67, message.optional_int32) + + def testMergeDuplicateNestedMessageScalars(self, message_module): + message = message_module.TestAllTypes() + text = ('optional_nested_message { bb: 1 } ' + 'optional_nested_message { bb: 2 }') + r = text_format.Merge(text, message) + self.assertTrue(r is message) + self.assertEqual(2, message.optional_nested_message.bb) + + def testParseOneof(self, message_module): + m = message_module.TestAllTypes() + m.oneof_uint32 = 11 + m2 = message_module.TestAllTypes() + text_format.Parse(text_format.MessageToString(m), m2) + self.assertEqual('oneof_uint32', m2.WhichOneof('oneof_field')) + + +# These are tests that aren't fundamentally specific to proto2, but are at +# the moment because of differences between the proto2 and proto3 test schemas. +# Ideally the schemas would be made more similar so these tests could pass. 
+class OnlyWorksWithProto2RightNowTests(TextFormatBase): + + def testPrintAllFieldsPointy(self): + message = unittest_pb2.TestAllTypes() + test_util.SetAllFields(message) + self.CompareToGoldenFile( + self.RemoveRedundantZeros( + text_format.MessageToString(message, pointy_brackets=True)), + 'text_format_unittest_data_pointy_oneof.txt') + + def testParseGolden(self): + golden_text = '\n'.join(self.ReadGolden('text_format_unittest_data.txt')) + parsed_message = unittest_pb2.TestAllTypes() + r = text_format.Parse(golden_text, parsed_message) + self.assertIs(r, parsed_message) + + message = unittest_pb2.TestAllTypes() + test_util.SetAllFields(message) + self.assertEqual(message, parsed_message) + + def testPrintAllFields(self): + message = unittest_pb2.TestAllTypes() + test_util.SetAllFields(message) + self.CompareToGoldenFile( + self.RemoveRedundantZeros(text_format.MessageToString(message)), + 'text_format_unittest_data_oneof_implemented.txt') + + def testPrintAllFieldsPointy(self): + message = unittest_pb2.TestAllTypes() + test_util.SetAllFields(message) + self.CompareToGoldenFile( + self.RemoveRedundantZeros( + text_format.MessageToString(message, pointy_brackets=True)), + 'text_format_unittest_data_pointy_oneof.txt') + + def testPrintInIndexOrder(self): + message = unittest_pb2.TestFieldOrderings() + message.my_string = '115' + message.my_int = 101 + message.my_float = 111 + message.optional_nested_message.oo = 0 + message.optional_nested_message.bb = 1 + self.CompareToGoldenText( + self.RemoveRedundantZeros(text_format.MessageToString( + message, use_index_order=True)), + 'my_string: \"115\"\nmy_int: 101\nmy_float: 111\n' + 'optional_nested_message {\n oo: 0\n bb: 1\n}\n') + self.CompareToGoldenText( + self.RemoveRedundantZeros(text_format.MessageToString( + message)), + 'my_int: 101\nmy_string: \"115\"\nmy_float: 111\n' + 'optional_nested_message {\n bb: 1\n oo: 0\n}\n') + + def testMergeLinesGolden(self): + opened = 
self.ReadGolden('text_format_unittest_data.txt') + parsed_message = unittest_pb2.TestAllTypes() + r = text_format.MergeLines(opened, parsed_message) + self.assertIs(r, parsed_message) + + message = unittest_pb2.TestAllTypes() + test_util.SetAllFields(message) + self.assertEqual(message, parsed_message) + + def testParseLinesGolden(self): + opened = self.ReadGolden('text_format_unittest_data.txt') + parsed_message = unittest_pb2.TestAllTypes() + r = text_format.ParseLines(opened, parsed_message) + self.assertIs(r, parsed_message) + + message = unittest_pb2.TestAllTypes() + test_util.SetAllFields(message) + self.assertEqual(message, parsed_message) + + def testPrintMap(self): + message = map_unittest_pb2.TestMap() + + message.map_int32_int32[-123] = -456 + message.map_int64_int64[-2**33] = -2**34 + message.map_uint32_uint32[123] = 456 + message.map_uint64_uint64[2**33] = 2**34 + message.map_string_string["abc"] = "123" + message.map_int32_foreign_message[111].c = 5 + + # Maps are serialized to text format using their underlying repeated + # representation. 
+ self.CompareToGoldenText( + text_format.MessageToString(message), + 'map_int32_int32 {\n' + ' key: -123\n' + ' value: -456\n' + '}\n' + 'map_int64_int64 {\n' + ' key: -8589934592\n' + ' value: -17179869184\n' + '}\n' + 'map_uint32_uint32 {\n' + ' key: 123\n' + ' value: 456\n' + '}\n' + 'map_uint64_uint64 {\n' + ' key: 8589934592\n' + ' value: 17179869184\n' + '}\n' + 'map_string_string {\n' + ' key: "abc"\n' + ' value: "123"\n' + '}\n' + 'map_int32_foreign_message {\n' + ' key: 111\n' + ' value {\n' + ' c: 5\n' + ' }\n' + '}\n') + + def testMapOrderEnforcement(self): + message = map_unittest_pb2.TestMap() + for letter in string.ascii_uppercase[13:26]: + message.map_string_string[letter] = 'dummy' + for letter in reversed(string.ascii_uppercase[0:13]): + message.map_string_string[letter] = 'dummy' + golden = ''.join(( + 'map_string_string {\n key: "%c"\n value: "dummy"\n}\n' % (letter,) + for letter in string.ascii_uppercase)) + self.CompareToGoldenText(text_format.MessageToString(message), golden) + + def testMapOrderSemantics(self): + golden_lines = self.ReadGolden('map_test_data.txt') + # The C++ implementation emits defaulted-value fields, while the Python + # implementation does not. Adjusting for this is awkward, but it is + # valuable to test against a common golden file. + line_blacklist = (' key: 0\n', + ' value: 0\n', + ' key: false\n', + ' value: false\n') + golden_lines = [line for line in golden_lines if line not in line_blacklist] + + message = map_unittest_pb2.TestMap() + text_format.ParseLines(golden_lines, message) + candidate = text_format.MessageToString(message) + # The Python implementation emits "1.0" for the double value that the C++ + # implementation emits as "1". + candidate = candidate.replace('1.0', '1', 2) + self.assertMultiLineEqual(candidate, ''.join(golden_lines)) + + +# Tests of proto2-only features (MessageSet, extensions, etc.). 
+class Proto2Tests(TextFormatBase): + + def testPrintMessageSet(self): + message = unittest_mset_pb2.TestMessageSetContainer() + ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension + ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension + message.message_set.Extensions[ext1].i = 23 + message.message_set.Extensions[ext2].str = 'foo' + self.CompareToGoldenText( + text_format.MessageToString(message), + 'message_set {\n' + ' [protobuf_unittest.TestMessageSetExtension1] {\n' + ' i: 23\n' + ' }\n' + ' [protobuf_unittest.TestMessageSetExtension2] {\n' + ' str: \"foo\"\n' + ' }\n' + '}\n') + + message = message_set_extensions_pb2.TestMessageSet() + ext = message_set_extensions_pb2.message_set_extension3 + message.Extensions[ext].text = 'bar' + self.CompareToGoldenText( + text_format.MessageToString(message), + '[google.protobuf.internal.TestMessageSetExtension3] {\n' + ' text: \"bar\"\n' + '}\n') + + def testPrintMessageSetAsOneLine(self): + message = unittest_mset_pb2.TestMessageSetContainer() + ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension + ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension + message.message_set.Extensions[ext1].i = 23 + message.message_set.Extensions[ext2].str = 'foo' + self.CompareToGoldenText( + text_format.MessageToString(message, as_one_line=True), + 'message_set {' + ' [protobuf_unittest.TestMessageSetExtension1] {' + ' i: 23' + ' }' + ' [protobuf_unittest.TestMessageSetExtension2] {' + ' str: \"foo\"' + ' }' + ' }') + + def testParseMessageSet(self): + message = unittest_pb2.TestAllTypes() + text = ('repeated_uint64: 1\n' + 'repeated_uint64: 2\n') + text_format.Parse(text, message) + self.assertEqual(1, message.repeated_uint64[0]) + self.assertEqual(2, message.repeated_uint64[1]) + + message = unittest_mset_pb2.TestMessageSetContainer() + text = ('message_set {\n' + ' [protobuf_unittest.TestMessageSetExtension1] {\n' + ' i: 23\n' + ' }\n' + ' 
[protobuf_unittest.TestMessageSetExtension2] {\n' + ' str: \"foo\"\n' + ' }\n' + '}\n') + text_format.Parse(text, message) + ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension + ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension + self.assertEqual(23, message.message_set.Extensions[ext1].i) + self.assertEqual('foo', message.message_set.Extensions[ext2].str) + + def testPrintAllExtensions(self): + message = unittest_pb2.TestAllExtensions() + test_util.SetAllExtensions(message) + self.CompareToGoldenFile( + self.RemoveRedundantZeros(text_format.MessageToString(message)), + 'text_format_unittest_extensions_data.txt') + + def testPrintAllExtensionsPointy(self): + message = unittest_pb2.TestAllExtensions() + test_util.SetAllExtensions(message) + self.CompareToGoldenFile( + self.RemoveRedundantZeros(text_format.MessageToString( + message, pointy_brackets=True)), + 'text_format_unittest_extensions_data_pointy.txt') + + def testParseGoldenExtensions(self): + golden_text = '\n'.join(self.ReadGolden( + 'text_format_unittest_extensions_data.txt')) + parsed_message = unittest_pb2.TestAllExtensions() + text_format.Parse(golden_text, parsed_message) + + message = unittest_pb2.TestAllExtensions() + test_util.SetAllExtensions(message) + self.assertEqual(message, parsed_message) + + def testParseAllExtensions(self): + message = unittest_pb2.TestAllExtensions() + test_util.SetAllExtensions(message) + ascii_text = text_format.MessageToString(message) + + parsed_message = unittest_pb2.TestAllExtensions() + text_format.Parse(ascii_text, parsed_message) + self.assertEqual(message, parsed_message) + + def testParseAllowedUnknownExtension(self): + # Skip over unknown extension correctly. 
+ message = unittest_mset_pb2.TestMessageSetContainer() + text = ('message_set {\n' + ' [unknown_extension] {\n' + ' i: 23\n' + ' [nested_unknown_ext]: {\n' + ' i: 23\n' + ' test: "test_string"\n' + ' floaty_float: -0.315\n' + ' num: -inf\n' + ' multiline_str: "abc"\n' + ' "def"\n' + ' "xyz."\n' + ' [nested_unknown_ext]: <\n' + ' i: 23\n' + ' i: 24\n' + ' pointfloat: .3\n' + ' test: "test_string"\n' + ' floaty_float: -0.315\n' + ' num: -inf\n' + ' long_string: "test" "test2" \n' + ' >\n' + ' }\n' + ' }\n' + ' [unknown_extension]: 5\n' + '}\n') + text_format.Parse(text, message, allow_unknown_extension=True) + golden = 'message_set {\n}\n' + self.CompareToGoldenText(text_format.MessageToString(message), golden) + + # Catch parse errors in unknown extension. + message = unittest_mset_pb2.TestMessageSetContainer() + malformed = ('message_set {\n' + ' [unknown_extension] {\n' + ' i:\n' # Missing value. + ' }\n' + '}\n') + six.assertRaisesRegex(self, + text_format.ParseError, + 'Invalid field value: }', + text_format.Parse, malformed, message, + allow_unknown_extension=True) + + message = unittest_mset_pb2.TestMessageSetContainer() + malformed = ('message_set {\n' + ' [unknown_extension] {\n' + ' str: "malformed string\n' # Missing closing quote. + ' }\n' + '}\n') + six.assertRaisesRegex(self, + text_format.ParseError, + 'Invalid field value: "', + text_format.Parse, malformed, message, + allow_unknown_extension=True) + + message = unittest_mset_pb2.TestMessageSetContainer() + malformed = ('message_set {\n' + ' [unknown_extension] {\n' + ' str: "malformed\n multiline\n string\n' + ' }\n' + '}\n') + six.assertRaisesRegex(self, + text_format.ParseError, + 'Invalid field value: "', + text_format.Parse, malformed, message, + allow_unknown_extension=True) + + message = unittest_mset_pb2.TestMessageSetContainer() + malformed = ('message_set {\n' + ' [malformed_extension] <\n' + ' i: -5\n' + ' \n' # Missing '>' here. 
+ '}\n') + six.assertRaisesRegex(self, + text_format.ParseError, + '5:1 : Expected ">".', + text_format.Parse, malformed, message, + allow_unknown_extension=True) + + # Don't allow unknown fields with allow_unknown_extension=True. + message = unittest_mset_pb2.TestMessageSetContainer() + malformed = ('message_set {\n' + ' unknown_field: true\n' + ' \n' # Missing '>' here. + '}\n') + six.assertRaisesRegex(self, + text_format.ParseError, + ('2:3 : Message type ' + '"proto2_wireformat_unittest.TestMessageSet" has no' + ' field named "unknown_field".'), + text_format.Parse, malformed, message, + allow_unknown_extension=True) + + # Parse known extension correcty. + message = unittest_mset_pb2.TestMessageSetContainer() + text = ('message_set {\n' + ' [protobuf_unittest.TestMessageSetExtension1] {\n' + ' i: 23\n' + ' }\n' + ' [protobuf_unittest.TestMessageSetExtension2] {\n' + ' str: \"foo\"\n' + ' }\n' + '}\n') + text_format.Parse(text, message, allow_unknown_extension=True) + ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension + ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension + self.assertEqual(23, message.message_set.Extensions[ext1].i) + self.assertEqual('foo', message.message_set.Extensions[ext2].str) + + def testParseBadExtension(self): + message = unittest_pb2.TestAllExtensions() + text = '[unknown_extension]: 8\n' + six.assertRaisesRegex(self, + text_format.ParseError, + '1:2 : Extension "unknown_extension" not registered.', + text_format.Parse, text, message) + message = unittest_pb2.TestAllTypes() + six.assertRaisesRegex(self, + text_format.ParseError, + ('1:2 : Message type "protobuf_unittest.TestAllTypes" does not have ' + 'extensions.'), + text_format.Parse, text, message) + + def testMergeDuplicateExtensionScalars(self): + message = unittest_pb2.TestAllExtensions() + text = ('[protobuf_unittest.optional_int32_extension]: 42 ' + '[protobuf_unittest.optional_int32_extension]: 67') + text_format.Merge(text, message) + 
self.assertEqual( + 67, + message.Extensions[unittest_pb2.optional_int32_extension]) + + def testParseDuplicateExtensionScalars(self): + message = unittest_pb2.TestAllExtensions() + text = ('[protobuf_unittest.optional_int32_extension]: 42 ' + '[protobuf_unittest.optional_int32_extension]: 67') + six.assertRaisesRegex(self, + text_format.ParseError, + ('1:96 : Message type "protobuf_unittest.TestAllExtensions" ' + 'should not have multiple ' + '"protobuf_unittest.optional_int32_extension" extensions.'), + text_format.Parse, text, message) + + def testParseDuplicateNestedMessageScalars(self): + message = unittest_pb2.TestAllTypes() + text = ('optional_nested_message { bb: 1 } ' + 'optional_nested_message { bb: 2 }') + six.assertRaisesRegex(self, + text_format.ParseError, + ('1:65 : Message type "protobuf_unittest.TestAllTypes.NestedMessage" ' + 'should not have multiple "bb" fields.'), + text_format.Parse, text, message) + + def testParseDuplicateScalars(self): + message = unittest_pb2.TestAllTypes() + text = ('optional_int32: 42 ' + 'optional_int32: 67') + six.assertRaisesRegex(self, + text_format.ParseError, + ('1:36 : Message type "protobuf_unittest.TestAllTypes" should not ' + 'have multiple "optional_int32" fields.'), + text_format.Parse, text, message) + + def testParseGroupNotClosed(self): + message = unittest_pb2.TestAllTypes() + text = 'RepeatedGroup: <' + six.assertRaisesRegex(self, + text_format.ParseError, '1:16 : Expected ">".', + text_format.Parse, text, message) + text = 'RepeatedGroup: {' + six.assertRaisesRegex(self, + text_format.ParseError, '1:16 : Expected "}".', + text_format.Parse, text, message) + + def testParseEmptyGroup(self): + message = unittest_pb2.TestAllTypes() + text = 'OptionalGroup: {}' + text_format.Parse(text, message) + self.assertTrue(message.HasField('optionalgroup')) + + message.Clear() + + message = unittest_pb2.TestAllTypes() + text = 'OptionalGroup: <>' + text_format.Parse(text, message) + 
self.assertTrue(message.HasField('optionalgroup')) + + # Maps aren't really proto2-only, but our test schema only has maps for + # proto2. + def testParseMap(self): + text = ('map_int32_int32 {\n' + ' key: -123\n' + ' value: -456\n' + '}\n' + 'map_int64_int64 {\n' + ' key: -8589934592\n' + ' value: -17179869184\n' + '}\n' + 'map_uint32_uint32 {\n' + ' key: 123\n' + ' value: 456\n' + '}\n' + 'map_uint64_uint64 {\n' + ' key: 8589934592\n' + ' value: 17179869184\n' + '}\n' + 'map_string_string {\n' + ' key: "abc"\n' + ' value: "123"\n' + '}\n' + 'map_int32_foreign_message {\n' + ' key: 111\n' + ' value {\n' + ' c: 5\n' + ' }\n' + '}\n') + message = map_unittest_pb2.TestMap() + text_format.Parse(text, message) + + self.assertEqual(-456, message.map_int32_int32[-123]) + self.assertEqual(-2**34, message.map_int64_int64[-2**33]) + self.assertEqual(456, message.map_uint32_uint32[123]) + self.assertEqual(2**34, message.map_uint64_uint64[2**33]) + self.assertEqual("123", message.map_string_string["abc"]) + self.assertEqual(5, message.map_int32_foreign_message[111].c) + + +class TokenizerTest(unittest.TestCase): + + def testSimpleTokenCases(self): + text = ('identifier1:"string1"\n \n\n' + 'identifier2 : \n \n123 \n identifier3 :\'string\'\n' + 'identifiER_4 : 1.1e+2 ID5:-0.23 ID6:\'aaaa\\\'bbbb\'\n' + 'ID7 : "aa\\"bb"\n\n\n\n ID8: {A:inf B:-inf C:true D:false}\n' + 'ID9: 22 ID10: -111111111111111111 ID11: -22\n' + 'ID12: 2222222222222222222 ID13: 1.23456f ID14: 1.2e+2f ' + 'false_bool: 0 true_BOOL:t \n true_bool1: 1 false_BOOL1:f ') + tokenizer = text_format._Tokenizer(text.splitlines()) + methods = [(tokenizer.ConsumeIdentifier, 'identifier1'), + ':', + (tokenizer.ConsumeString, 'string1'), + (tokenizer.ConsumeIdentifier, 'identifier2'), + ':', + (tokenizer.ConsumeInt32, 123), + (tokenizer.ConsumeIdentifier, 'identifier3'), + ':', + (tokenizer.ConsumeString, 'string'), + (tokenizer.ConsumeIdentifier, 'identifiER_4'), + ':', + (tokenizer.ConsumeFloat, 1.1e+2), + 
(tokenizer.ConsumeIdentifier, 'ID5'), + ':', + (tokenizer.ConsumeFloat, -0.23), + (tokenizer.ConsumeIdentifier, 'ID6'), + ':', + (tokenizer.ConsumeString, 'aaaa\'bbbb'), + (tokenizer.ConsumeIdentifier, 'ID7'), + ':', + (tokenizer.ConsumeString, 'aa\"bb'), + (tokenizer.ConsumeIdentifier, 'ID8'), + ':', + '{', + (tokenizer.ConsumeIdentifier, 'A'), + ':', + (tokenizer.ConsumeFloat, float('inf')), + (tokenizer.ConsumeIdentifier, 'B'), + ':', + (tokenizer.ConsumeFloat, -float('inf')), + (tokenizer.ConsumeIdentifier, 'C'), + ':', + (tokenizer.ConsumeBool, True), + (tokenizer.ConsumeIdentifier, 'D'), + ':', + (tokenizer.ConsumeBool, False), + '}', + (tokenizer.ConsumeIdentifier, 'ID9'), + ':', + (tokenizer.ConsumeUint32, 22), + (tokenizer.ConsumeIdentifier, 'ID10'), + ':', + (tokenizer.ConsumeInt64, -111111111111111111), + (tokenizer.ConsumeIdentifier, 'ID11'), + ':', + (tokenizer.ConsumeInt32, -22), + (tokenizer.ConsumeIdentifier, 'ID12'), + ':', + (tokenizer.ConsumeUint64, 2222222222222222222), + (tokenizer.ConsumeIdentifier, 'ID13'), + ':', + (tokenizer.ConsumeFloat, 1.23456), + (tokenizer.ConsumeIdentifier, 'ID14'), + ':', + (tokenizer.ConsumeFloat, 1.2e+2), + (tokenizer.ConsumeIdentifier, 'false_bool'), + ':', + (tokenizer.ConsumeBool, False), + (tokenizer.ConsumeIdentifier, 'true_BOOL'), + ':', + (tokenizer.ConsumeBool, True), + (tokenizer.ConsumeIdentifier, 'true_bool1'), + ':', + (tokenizer.ConsumeBool, True), + (tokenizer.ConsumeIdentifier, 'false_BOOL1'), + ':', + (tokenizer.ConsumeBool, False)] + + i = 0 + while not tokenizer.AtEnd(): + m = methods[i] + if type(m) == str: + token = tokenizer.token + self.assertEqual(token, m) + tokenizer.NextToken() + else: + self.assertEqual(m[1], m[0]()) + i += 1 + + def testConsumeIntegers(self): + # This test only tests the failures in the integer parsing methods as well + # as the '0' special cases. 
+ int64_max = (1 << 63) - 1 + uint32_max = (1 << 32) - 1 + text = '-1 %d %d' % (uint32_max + 1, int64_max + 1) + tokenizer = text_format._Tokenizer(text.splitlines()) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint64) + self.assertEqual(-1, tokenizer.ConsumeInt32()) + + self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt32) + self.assertEqual(uint32_max + 1, tokenizer.ConsumeInt64()) + + self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt64) + self.assertEqual(int64_max + 1, tokenizer.ConsumeUint64()) + self.assertTrue(tokenizer.AtEnd()) + + text = '-0 -0 0 0' + tokenizer = text_format._Tokenizer(text.splitlines()) + self.assertEqual(0, tokenizer.ConsumeUint32()) + self.assertEqual(0, tokenizer.ConsumeUint64()) + self.assertEqual(0, tokenizer.ConsumeUint32()) + self.assertEqual(0, tokenizer.ConsumeUint64()) + self.assertTrue(tokenizer.AtEnd()) + + def testConsumeByteString(self): + text = '"string1\'' + tokenizer = text_format._Tokenizer(text.splitlines()) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) + + text = 'string1"' + tokenizer = text_format._Tokenizer(text.splitlines()) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) + + text = '\n"\\xt"' + tokenizer = text_format._Tokenizer(text.splitlines()) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) + + text = '\n"\\"' + tokenizer = text_format._Tokenizer(text.splitlines()) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) + + text = '\n"\\x"' + tokenizer = text_format._Tokenizer(text.splitlines()) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString) + + def testConsumeBool(self): + text = 'not-a-bool' + tokenizer = text_format._Tokenizer(text.splitlines()) + self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool) 
+ + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/type_checkers.py b/deps/google/protobuf/internal/type_checkers.py new file mode 100644 index 00000000..f30ca6a8 --- /dev/null +++ b/deps/google/protobuf/internal/type_checkers.py @@ -0,0 +1,341 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides type checking routines. 
+ +This module defines type checking utilities in the forms of dictionaries: + +VALUE_CHECKERS: A dictionary of field types and a value validation object. +TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing + function. +TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization + function. +FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their + coresponding wire types. +TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization + function. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import six + +if six.PY3: + long = int + +from google.protobuf.internal import api_implementation +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import descriptor + +_FieldDescriptor = descriptor.FieldDescriptor + +def SupportsOpenEnums(field_descriptor): + return field_descriptor.containing_type.syntax == "proto3" + +def GetTypeChecker(field): + """Returns a type checker for a message field of the specified types. + + Args: + field: FieldDescriptor object for this field. + + Returns: + An instance of TypeChecker which can be used to verify the types + of values assigned to a field of the specified type. + """ + if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and + field.type == _FieldDescriptor.TYPE_STRING): + return UnicodeValueChecker() + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + if SupportsOpenEnums(field): + # When open enums are supported, any int32 can be assigned. + return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] + else: + return EnumValueChecker(field.enum_type) + return _VALUE_CHECKERS[field.cpp_type] + + +# None of the typecheckers below make any attempt to guard against people +# subclassing builtin types and doing weird things. 
We're not trying to +# protect against malicious clients here, just people accidentally shooting +# themselves in the foot in obvious ways. + +class TypeChecker(object): + + """Type checker used to catch type errors as early as possible + when the client is setting scalar fields in protocol messages. + """ + + def __init__(self, *acceptable_types): + self._acceptable_types = acceptable_types + + def CheckValue(self, proposed_value): + """Type check the provided value and return it. + + The returned value might have been normalized to another type. + """ + if not isinstance(proposed_value, self._acceptable_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), self._acceptable_types)) + raise TypeError(message) + return proposed_value + + +# IntValueChecker and its subclasses perform integer type-checks +# and bounds-checks. +class IntValueChecker(object): + + """Checker used for integer fields. Performs type-check and range check.""" + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, six.integer_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), six.integer_types)) + raise TypeError(message) + if not self._MIN <= proposed_value <= self._MAX: + raise ValueError('Value out of range: %d' % proposed_value) + # We force 32-bit values to int and 64-bit values to long to make + # alternate implementations where the distinction is more significant + # (e.g. the C++ implementation) simpler. + proposed_value = self._TYPE(proposed_value) + return proposed_value + + def DefaultValue(self): + return 0 + + +class EnumValueChecker(object): + + """Checker used for enum fields. 
Performs type-check and range check.""" + + def __init__(self, enum_type): + self._enum_type = enum_type + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, six.integer_types): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), six.integer_types)) + raise TypeError(message) + if proposed_value not in self._enum_type.values_by_number: + raise ValueError('Unknown enum value: %d' % proposed_value) + return proposed_value + + def DefaultValue(self): + return self._enum_type.values[0].number + + +class UnicodeValueChecker(object): + + """Checker used for string fields. + + Always returns a unicode value, even if the input is of type str. + """ + + def CheckValue(self, proposed_value): + if not isinstance(proposed_value, (bytes, six.text_type)): + message = ('%.1024r has type %s, but expected one of: %s' % + (proposed_value, type(proposed_value), (bytes, six.text_type))) + raise TypeError(message) + + # If the value is of type 'bytes' make sure that it is valid UTF-8 data. + if isinstance(proposed_value, bytes): + try: + proposed_value = proposed_value.decode('utf-8') + except UnicodeDecodeError: + raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' + 'encoding. Non-UTF-8 strings must be converted to ' + 'unicode objects before being added.' % + (proposed_value)) + return proposed_value + + def DefaultValue(self): + return u"" + + +class Int32ValueChecker(IntValueChecker): + # We're sure to use ints instead of longs here since comparison may be more + # efficient. + _MIN = -2147483648 + _MAX = 2147483647 + _TYPE = int + + +class Uint32ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 32) - 1 + _TYPE = int + + +class Int64ValueChecker(IntValueChecker): + _MIN = -(1 << 63) + _MAX = (1 << 63) - 1 + _TYPE = long + + +class Uint64ValueChecker(IntValueChecker): + _MIN = 0 + _MAX = (1 << 64) - 1 + _TYPE = long + + +# Type-checkers for all scalar CPPTYPEs. 
+_VALUE_CHECKERS = { + _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), + _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker( + float, int, long), + _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker( + float, int, long), + _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int), + _FieldDescriptor.CPPTYPE_STRING: TypeChecker(bytes), + } + + +# Map from field type to a function F, such that F(field_num, value) +# gives the total byte size for a value of the given type. This +# byte size includes tag information and any other additional space +# associated with serializing "value". +TYPE_TO_BYTE_SIZE_FN = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, + _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, + _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, + _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, + _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, + _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, + _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, + _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, + _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, + _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, + _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, + _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, + _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, + _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, + _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, + _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, + _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, + _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize + } + + +# Maps from field types to encoder constructors. 
+TYPE_TO_ENCODER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, + _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, + _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, + _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, + _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, + _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, + _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, + _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, + } + + +# Maps from field types to sizer constructors. 
+TYPE_TO_SIZER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, + _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, + _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, + _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, + _FieldDescriptor.TYPE_STRING: encoder.StringSizer, + _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, + _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, + _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, + } + + +# Maps from field type to a decoder constructor. 
+TYPE_TO_DECODER = { + _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, + _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, + _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, + _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, + _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, + _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, + _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, + _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, + _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, + _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, + _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, + _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, + _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, + _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, + _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, + _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, + _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, + _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, + } + +# Maps from field type to expected wiretype. 
+FIELD_TYPE_TO_WIRE_TYPE = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_STRING: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, + _FieldDescriptor.TYPE_MESSAGE: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_BYTES: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, + } diff --git a/deps/google/protobuf/internal/unknown_fields_test.py b/deps/google/protobuf/internal/unknown_fields_test.py new file mode 100644 index 00000000..9685b8b4 --- /dev/null +++ b/deps/google/protobuf/internal/unknown_fields_test.py @@ -0,0 +1,300 @@ +#! /usr/bin/env python +# -*- coding: utf-8 -*- +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 

"""Test for preservation of unknown fields in the pure Python implementation."""

__author__ = 'bohdank@google.com (Bohdan Koval)'

try:
  import unittest2 as unittest
except ImportError:
  import unittest
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import unittest_proto3_arena_pb2
from google.protobuf.internal import api_implementation
from google.protobuf.internal import encoder
from google.protobuf.internal import message_set_extensions_pb2
from google.protobuf.internal import missing_enum_values_pb2
from google.protobuf.internal import test_util
from google.protobuf.internal import type_checkers


def SkipIfCppImplementation(func):
  # Decorator: the C++ implementation (API version 2) keeps unknown fields
  # internal and does not expose them to Python, so tests that inspect
  # _unknown_fields directly are skipped there.
  return unittest.skipIf(
      api_implementation.Type() == 'cpp' and api_implementation.Version() == 2,
      'C++ implementation does not expose unknown fields to Python')(func)


class UnknownFieldsTest(unittest.TestCase):
  """Message-level round-trip behavior of unknown fields (proto2 vs proto3)."""

  def setUp(self):
    self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.all_fields = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(self.all_fields)
    self.all_fields_data = self.all_fields.SerializeToString()
    self.empty_message = unittest_pb2.TestEmptyMessage()
    self.empty_message.ParseFromString(self.all_fields_data)

  def testSerialize(self):
    data = self.empty_message.SerializeToString()

    # Don't use assertEqual because we don't want to dump raw binary data to
    # stdout.
    self.assertTrue(data == self.all_fields_data)

  def testSerializeProto3(self):
    # Verify that proto3 doesn't preserve unknown fields.
    message = unittest_proto3_arena_pb2.TestEmptyMessage()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(0, len(message.SerializeToString()))

  def testByteSize(self):
    self.assertEqual(self.all_fields.ByteSize(), self.empty_message.ByteSize())

  def testListFields(self):
    # Make sure ListFields doesn't return unknown fields.
    self.assertEqual(0, len(self.empty_message.ListFields()))

  def testSerializeMessageSetWireFormatUnknownExtension(self):
    # Create a message using the message set wire format with an unknown
    # message.
    raw = unittest_mset_pb2.RawMessageSet()

    # Add an unknown extension.
    item = raw.item.add()
    item.type_id = 98418603
    message1 = message_set_extensions_pb2.TestMessageSetExtension1()
    message1.i = 12345
    item.message = message1.SerializeToString()

    serialized = raw.SerializeToString()

    # Parse message using the message set wire format.
    proto = message_set_extensions_pb2.TestMessageSet()
    proto.MergeFromString(serialized)

    # Verify that the unknown extension is serialized unchanged
    reserialized = proto.SerializeToString()
    new_raw = unittest_mset_pb2.RawMessageSet()
    new_raw.MergeFromString(reserialized)
    self.assertEqual(raw, new_raw)

  def testEquals(self):
    message = unittest_pb2.TestEmptyMessage()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(self.empty_message, message)

    self.all_fields.ClearField('optional_string')
    message.ParseFromString(self.all_fields.SerializeToString())
    self.assertNotEqual(self.empty_message, message)


class UnknownFieldsAccessorsTest(unittest.TestCase):
  """Decodes individual unknown fields back through the regular decoders."""

  def setUp(self):
    self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.all_fields = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(self.all_fields)
    self.all_fields_data = self.all_fields.SerializeToString()
    self.empty_message = unittest_pb2.TestEmptyMessage()
    self.empty_message.ParseFromString(self.all_fields_data)
    if api_implementation.Type() != 'cpp':
      # _unknown_fields is an implementation detail.
      self.unknown_fields = self.empty_message._unknown_fields

  # All the tests that use GetField() check an implementation detail of the
  # Python implementation, which stores unknown fields as serialized strings.
  # These tests are skipped by the C++ implementation: it's enough to check that
  # the message is correctly serialized.

  def GetField(self, name):
    field_descriptor = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
    field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
    result_dict = {}
    for tag_bytes, value in self.unknown_fields:
      if tag_bytes == field_tag:
        decoder = unittest_pb2.TestAllTypes._decoders_by_tag[tag_bytes][0]
        decoder(value, 0, len(value), self.all_fields, result_dict)
    return result_dict[field_descriptor]

  @SkipIfCppImplementation
  def testEnum(self):
    value = self.GetField('optional_nested_enum')
    self.assertEqual(self.all_fields.optional_nested_enum, value)

  @SkipIfCppImplementation
  def testRepeatedEnum(self):
    value = self.GetField('repeated_nested_enum')
    self.assertEqual(self.all_fields.repeated_nested_enum, value)

  @SkipIfCppImplementation
  def testVarint(self):
    value = self.GetField('optional_int32')
    self.assertEqual(self.all_fields.optional_int32, value)

  @SkipIfCppImplementation
  def testFixed32(self):
    value = self.GetField('optional_fixed32')
    self.assertEqual(self.all_fields.optional_fixed32, value)

  @SkipIfCppImplementation
  def testFixed64(self):
    value = self.GetField('optional_fixed64')
    self.assertEqual(self.all_fields.optional_fixed64, value)

  @SkipIfCppImplementation
  def testLengthDelimited(self):
    value = self.GetField('optional_string')
    self.assertEqual(self.all_fields.optional_string, value)

  @SkipIfCppImplementation
  def testGroup(self):
    value = self.GetField('optionalgroup')
    self.assertEqual(self.all_fields.optionalgroup, value)

  def testCopyFrom(self):
    message = unittest_pb2.TestEmptyMessage()
    message.CopyFrom(self.empty_message)
    self.assertEqual(message.SerializeToString(), self.all_fields_data)

  def testMergeFrom(self):
    message = unittest_pb2.TestAllTypes()
    message.optional_int32 = 1
    message.optional_uint32 = 2
    source = unittest_pb2.TestEmptyMessage()
    source.ParseFromString(message.SerializeToString())

    message.ClearField('optional_int32')
    message.optional_int64 = 3
    message.optional_uint32 = 4
    destination = unittest_pb2.TestEmptyMessage()
    destination.ParseFromString(message.SerializeToString())

    destination.MergeFrom(source)
    # Check that the fields were correctly merged, even stored in the unknown
    # fields set.
    message.ParseFromString(destination.SerializeToString())
    self.assertEqual(message.optional_int32, 1)
    self.assertEqual(message.optional_uint32, 2)
    self.assertEqual(message.optional_int64, 3)

  def testClear(self):
    self.empty_message.Clear()
    # All cleared, even unknown fields.
    self.assertEqual(self.empty_message.SerializeToString(), b'')

  def testUnknownExtensions(self):
    message = unittest_pb2.TestEmptyMessageWithExtensions()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(message.SerializeToString(), self.all_fields_data)


class UnknownEnumValuesTest(unittest.TestCase):
  """Behavior when a serialized enum value is unknown to the parsing schema."""

  def setUp(self):
    self.descriptor = missing_enum_values_pb2.TestEnumValues.DESCRIPTOR

    self.message = missing_enum_values_pb2.TestEnumValues()
    self.message.optional_nested_enum = (
        missing_enum_values_pb2.TestEnumValues.ZERO)
    self.message.repeated_nested_enum.extend([
        missing_enum_values_pb2.TestEnumValues.ZERO,
        missing_enum_values_pb2.TestEnumValues.ONE,
        ])
    self.message.packed_nested_enum.extend([
        missing_enum_values_pb2.TestEnumValues.ZERO,
        missing_enum_values_pb2.TestEnumValues.ONE,
        ])
    self.message_data = self.message.SerializeToString()
    self.missing_message = missing_enum_values_pb2.TestMissingEnumValues()
    self.missing_message.ParseFromString(self.message_data)
    if api_implementation.Type() != 'cpp':
      # _unknown_fields is an implementation detail.
      self.unknown_fields = self.missing_message._unknown_fields

  # All the tests that use GetField() check an implementation detail of the
  # Python implementation, which stores unknown fields as serialized strings.
  # These tests are skipped by the C++ implementation: it's enough to check that
  # the message is correctly serialized.

  def GetField(self, name):
    field_descriptor = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
    field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
    result_dict = {}
    for tag_bytes, value in self.unknown_fields:
      if tag_bytes == field_tag:
        decoder = missing_enum_values_pb2.TestEnumValues._decoders_by_tag[
            tag_bytes][0]
        decoder(value, 0, len(value), self.message, result_dict)
    return result_dict[field_descriptor]

  def testUnknownParseMismatchEnumValue(self):
    just_string = missing_enum_values_pb2.JustString()
    just_string.dummy = 'blah'

    missing = missing_enum_values_pb2.TestEnumValues()
    # The parse is invalid, storing the string proto into the set of
    # unknown fields.
    missing.ParseFromString(just_string.SerializeToString())

    # Fetching the enum field shouldn't crash, instead returning the
    # default value.
    self.assertEqual(missing.optional_nested_enum, 0)

  @SkipIfCppImplementation
  def testUnknownEnumValue(self):
    self.assertFalse(self.missing_message.HasField('optional_nested_enum'))
    value = self.GetField('optional_nested_enum')
    self.assertEqual(self.message.optional_nested_enum, value)

  @SkipIfCppImplementation
  def testUnknownRepeatedEnumValue(self):
    value = self.GetField('repeated_nested_enum')
    self.assertEqual(self.message.repeated_nested_enum, value)

  @SkipIfCppImplementation
  def testUnknownPackedEnumValue(self):
    value = self.GetField('packed_nested_enum')
    self.assertEqual(self.message.packed_nested_enum, value)

  def testRoundTrip(self):
    new_message = missing_enum_values_pb2.TestEnumValues()
    new_message.ParseFromString(self.missing_message.SerializeToString())
    self.assertEqual(self.message, new_message)


if __name__ == '__main__':
  unittest.main()
diff --git a/deps/google/protobuf/internal/well_known_types.py b/deps/google/protobuf/internal/well_known_types.py
new file mode 100644
index 00000000..d3de9831
--- /dev/null
+++ b/deps/google/protobuf/internal/well_known_types.py
@@ -0,0 +1,622 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc.
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains well known classes. + +This files defines well known classes which need extra maintenance including: + - Any + - Duration + - FieldMask + - Timestamp +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +from datetime import datetime +from datetime import timedelta + +from google.protobuf.descriptor import FieldDescriptor + +_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' +_NANOS_PER_SECOND = 1000000000 +_NANOS_PER_MILLISECOND = 1000000 +_NANOS_PER_MICROSECOND = 1000 +_MILLIS_PER_SECOND = 1000 +_MICROS_PER_SECOND = 1000000 +_SECONDS_PER_DAY = 24 * 3600 + + +class Error(Exception): + """Top-level module error.""" + + +class ParseError(Error): + """Thrown in case of parsing error.""" + + +class Any(object): + """Class for Any Message type.""" + + def Pack(self, msg): + """Packs the specified message into current Any message.""" + self.type_url = 'type.googleapis.com/%s' % msg.DESCRIPTOR.full_name + self.value = msg.SerializeToString() + + def Unpack(self, msg): + """Unpacks the current Any message into specified message.""" + descriptor 
= msg.DESCRIPTOR + if not self.Is(descriptor): + return False + msg.ParseFromString(self.value) + return True + + def Is(self, descriptor): + """Checks if this Any represents the given protobuf type.""" + # Only last part is to be used: b/25630112 + return self.type_url.split('/')[-1] == descriptor.full_name + + +class Timestamp(object): + """Class for Timestamp message type.""" + + def ToJsonString(self): + """Converts Timestamp to RFC 3339 date string format. + + Returns: + A string converted from timestamp. The string is always Z-normalized + and uses 3, 6 or 9 fractional digits as required to represent the + exact time. Example of the return format: '1972-01-01T10:00:20.021Z' + """ + nanos = self.nanos % _NANOS_PER_SECOND + total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND + seconds = total_sec % _SECONDS_PER_DAY + days = (total_sec - seconds) // _SECONDS_PER_DAY + dt = datetime(1970, 1, 1) + timedelta(days, seconds) + + result = dt.isoformat() + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 'Z' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. + return result + '.%03dZ' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06dZ' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09dZ' % nanos + + def FromJsonString(self, value): + """Parse a RFC 3339 date string format to Timestamp. + + Args: + value: A date string. Any fractional digits (or none) and any offset are + accepted as long as they fit into nano-seconds precision. + Example of accepted format: '1972-01-01T10:00:20.021-05:00' + + Raises: + ParseError: On parsing problems. 
+ """ + timezone_offset = value.find('Z') + if timezone_offset == -1: + timezone_offset = value.find('+') + if timezone_offset == -1: + timezone_offset = value.rfind('-') + if timezone_offset == -1: + raise ParseError( + 'Failed to parse timestamp: missing valid timezone offset.') + time_value = value[0:timezone_offset] + # Parse datetime and nanos. + point_position = time_value.find('.') + if point_position == -1: + second_value = time_value + nano_value = '' + else: + second_value = time_value[:point_position] + nano_value = time_value[point_position + 1:] + date_object = datetime.strptime(second_value, _TIMESTAMPFOMAT) + td = date_object - datetime(1970, 1, 1) + seconds = td.seconds + td.days * _SECONDS_PER_DAY + if len(nano_value) > 9: + raise ParseError( + 'Failed to parse Timestamp: nanos {0} more than ' + '9 fractional digits.'.format(nano_value)) + if nano_value: + nanos = round(float('0.' + nano_value) * 1e9) + else: + nanos = 0 + # Parse timezone offsets. + if value[timezone_offset] == 'Z': + if len(value) != timezone_offset + 1: + raise ParseError('Failed to parse timestamp: invalid trailing' + ' data {0}.'.format(value)) + else: + timezone = value[timezone_offset:] + pos = timezone.find(':') + if pos == -1: + raise ParseError( + 'Invalid timezone offset value: {0}.'.format(timezone)) + if timezone[0] == '+': + seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + else: + seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 + # Set seconds and nanos + self.seconds = int(seconds) + self.nanos = int(nanos) + + def GetCurrentTime(self): + """Get the current UTC into Timestamp.""" + self.FromDatetime(datetime.utcnow()) + + def ToNanoseconds(self): + """Converts Timestamp to nanoseconds since epoch.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts Timestamp to microseconds since epoch.""" + return (self.seconds * _MICROS_PER_SECOND + + self.nanos // _NANOS_PER_MICROSECOND) + + def 
ToMilliseconds(self): + """Converts Timestamp to milliseconds since epoch.""" + return (self.seconds * _MILLIS_PER_SECOND + + self.nanos // _NANOS_PER_MILLISECOND) + + def ToSeconds(self): + """Converts Timestamp to seconds since epoch.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds since epoch to Timestamp.""" + self.seconds = nanos // _NANOS_PER_SECOND + self.nanos = nanos % _NANOS_PER_SECOND + + def FromMicroseconds(self, micros): + """Converts microseconds since epoch to Timestamp.""" + self.seconds = micros // _MICROS_PER_SECOND + self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND + + def FromMilliseconds(self, millis): + """Converts milliseconds since epoch to Timestamp.""" + self.seconds = millis // _MILLIS_PER_SECOND + self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND + + def FromSeconds(self, seconds): + """Converts seconds since epoch to Timestamp.""" + self.seconds = seconds + self.nanos = 0 + + def ToDatetime(self): + """Converts Timestamp to datetime.""" + return datetime.utcfromtimestamp( + self.seconds + self.nanos / float(_NANOS_PER_SECOND)) + + def FromDatetime(self, dt): + """Converts datetime to Timestamp.""" + td = dt - datetime(1970, 1, 1) + self.seconds = td.seconds + td.days * _SECONDS_PER_DAY + self.nanos = td.microseconds * _NANOS_PER_MICROSECOND + + +class Duration(object): + """Class for Duration message type.""" + + def ToJsonString(self): + """Converts Duration to string format. + + Returns: + A string converted from self. The string format will contains + 3, 6, or 9 fractional digits depending on the precision required to + represent the exact Duration value. 
For example: "1s", "1.010s", + "1.000000100s", "-3.100s" + """ + if self.seconds < 0 or self.nanos < 0: + result = '-' + seconds = - self.seconds + int((0 - self.nanos) // 1e9) + nanos = (0 - self.nanos) % 1e9 + else: + result = '' + seconds = self.seconds + int(self.nanos // 1e9) + nanos = self.nanos % 1e9 + result += '%d' % seconds + if (nanos % 1e9) == 0: + # If there are 0 fractional digits, the fractional + # point '.' should be omitted when serializing. + return result + 's' + if (nanos % 1e6) == 0: + # Serialize 3 fractional digits. + return result + '.%03ds' % (nanos / 1e6) + if (nanos % 1e3) == 0: + # Serialize 6 fractional digits. + return result + '.%06ds' % (nanos / 1e3) + # Serialize 9 fractional digits. + return result + '.%09ds' % nanos + + def FromJsonString(self, value): + """Converts a string to Duration. + + Args: + value: A string to be converted. The string must end with 's'. Any + fractional digits (or none) are accepted as long as they fit into + precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s + + Raises: + ParseError: On parsing problems. 
+ """ + if len(value) < 1 or value[-1] != 's': + raise ParseError( + 'Duration must end with letter "s": {0}.'.format(value)) + try: + pos = value.find('.') + if pos == -1: + self.seconds = int(value[:-1]) + self.nanos = 0 + else: + self.seconds = int(value[:pos]) + if value[0] == '-': + self.nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) + else: + self.nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) + except ValueError: + raise ParseError( + 'Couldn\'t parse duration: {0}.'.format(value)) + + def ToNanoseconds(self): + """Converts a Duration to nanoseconds.""" + return self.seconds * _NANOS_PER_SECOND + self.nanos + + def ToMicroseconds(self): + """Converts a Duration to microseconds.""" + micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) + return self.seconds * _MICROS_PER_SECOND + micros + + def ToMilliseconds(self): + """Converts a Duration to milliseconds.""" + millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) + return self.seconds * _MILLIS_PER_SECOND + millis + + def ToSeconds(self): + """Converts a Duration to seconds.""" + return self.seconds + + def FromNanoseconds(self, nanos): + """Converts nanoseconds to Duration.""" + self._NormalizeDuration(nanos // _NANOS_PER_SECOND, + nanos % _NANOS_PER_SECOND) + + def FromMicroseconds(self, micros): + """Converts microseconds to Duration.""" + self._NormalizeDuration( + micros // _MICROS_PER_SECOND, + (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) + + def FromMilliseconds(self, millis): + """Converts milliseconds to Duration.""" + self._NormalizeDuration( + millis // _MILLIS_PER_SECOND, + (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) + + def FromSeconds(self, seconds): + """Converts seconds to Duration.""" + self.seconds = seconds + self.nanos = 0 + + def ToTimedelta(self): + """Converts Duration to timedelta.""" + return timedelta( + seconds=self.seconds, microseconds=_RoundTowardZero( + self.nanos, _NANOS_PER_MICROSECOND)) + + def 
FromTimedelta(self, td): + """Convertd timedelta to Duration.""" + self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, + td.microseconds * _NANOS_PER_MICROSECOND) + + def _NormalizeDuration(self, seconds, nanos): + """Set Duration by seconds and nonas.""" + # Force nanos to be negative if the duration is negative. + if seconds < 0 and nanos > 0: + seconds += 1 + nanos -= _NANOS_PER_SECOND + self.seconds = seconds + self.nanos = nanos + + +def _RoundTowardZero(value, divider): + """Truncates the remainder part after division.""" + # For some languanges, the sign of the remainder is implementation + # dependent if any of the operands is negative. Here we enforce + # "rounded toward zero" semantics. For example, for (-5) / 2 an + # implementation may give -3 as the result with the remainder being + # 1. This function ensures we always return -2 (closer to zero). + result = value // divider + remainder = value % divider + if result < 0 and remainder > 0: + return result + 1 + else: + return result + + +class FieldMask(object): + """Class for FieldMask message type.""" + + def ToJsonString(self): + """Converts FieldMask to string according to proto3 JSON spec.""" + return ','.join(self.paths) + + def FromJsonString(self, value): + """Converts string to FieldMask according to proto3 JSON spec.""" + self.Clear() + for path in value.split(','): + self.paths.append(path) + + def IsValidForDescriptor(self, message_descriptor): + """Checks whether the FieldMask is valid for Message Descriptor.""" + for path in self.paths: + if not _IsValidPath(message_descriptor, path): + return False + return True + + def AllFieldsFromDescriptor(self, message_descriptor): + """Gets all direct fields of Message Descriptor to FieldMask.""" + self.Clear() + for field in message_descriptor.fields: + self.paths.append(field.name) + + def CanonicalFormFromMask(self, mask): + """Converts a FieldMask to the canonical form. + + Removes paths that are covered by another path. 
For example, + "foo.bar" is covered by "foo" and will be removed if "foo" + is also in the FieldMask. Then sorts all paths in alphabetical order. + + Args: + mask: The original FieldMask to be converted. + """ + tree = _FieldMaskTree(mask) + tree.ToFieldMask(self) + + def Union(self, mask1, mask2): + """Merges mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + tree.MergeFromFieldMask(mask2) + tree.ToFieldMask(self) + + def Intersect(self, mask1, mask2): + """Intersects mask1 and mask2 into this FieldMask.""" + _CheckFieldMaskMessage(mask1) + _CheckFieldMaskMessage(mask2) + tree = _FieldMaskTree(mask1) + intersection = _FieldMaskTree() + for path in mask2.paths: + tree.IntersectPath(path, intersection) + intersection.ToFieldMask(self) + + def MergeMessage( + self, source, destination, + replace_message_field=False, replace_repeated_field=False): + """Merges fields specified in FieldMask from source to destination. + + Args: + source: Source message. + destination: The destination message to be merged into. + replace_message_field: Replace message field if True. Merge message + field if False. + replace_repeated_field: Replace repeated field if True. Append + elements of repeated field if False. 
+ """ + tree = _FieldMaskTree(self) + tree.MergeMessage( + source, destination, replace_message_field, replace_repeated_field) + + +def _IsValidPath(message_descriptor, path): + """Checks whether the path is valid for Message Descriptor.""" + parts = path.split('.') + last = parts.pop() + for name in parts: + field = message_descriptor.fields_by_name[name] + if (field is None or + field.label == FieldDescriptor.LABEL_REPEATED or + field.type != FieldDescriptor.TYPE_MESSAGE): + return False + message_descriptor = field.message_type + return last in message_descriptor.fields_by_name + + +def _CheckFieldMaskMessage(message): + """Raises ValueError if message is not a FieldMask.""" + message_descriptor = message.DESCRIPTOR + if (message_descriptor.name != 'FieldMask' or + message_descriptor.file.name != 'google/protobuf/field_mask.proto'): + raise ValueError('Message {0} is not a FieldMask.'.format( + message_descriptor.full_name)) + + +class _FieldMaskTree(object): + """Represents a FieldMask in a tree structure. + + For example, given a FieldMask "foo.bar,foo.baz,bar.baz", + the FieldMaskTree will be: + [_root] -+- foo -+- bar + | | + | +- baz + | + +- bar --- baz + In the tree, each leaf node represents a field path. + """ + + def __init__(self, field_mask=None): + """Initializes the tree by FieldMask.""" + self._root = {} + if field_mask: + self.MergeFromFieldMask(field_mask) + + def MergeFromFieldMask(self, field_mask): + """Merges a FieldMask to the tree.""" + for path in field_mask.paths: + self.AddPath(path) + + def AddPath(self, path): + """Adds a field path into the tree. + + If the field path to add is a sub-path of an existing field path + in the tree (i.e., a leaf node), it means the tree already matches + the given path so nothing will be added to the tree. If the path + matches an existing non-leaf node in the tree, that non-leaf node + will be turned into a leaf node with all its children removed because + the path matches all the node's children. 
Otherwise, a new path will + be added. + + Args: + path: The field path to add. + """ + node = self._root + for name in path.split('.'): + if name not in node: + node[name] = {} + elif not node[name]: + # Pre-existing empty node implies we already have this entire tree. + return + node = node[name] + # Remove any sub-trees we might have had. + node.clear() + + def ToFieldMask(self, field_mask): + """Converts the tree to a FieldMask.""" + field_mask.Clear() + _AddFieldPaths(self._root, '', field_mask) + + def IntersectPath(self, path, intersection): + """Calculates the intersection part of a field path with this tree. + + Args: + path: The field path to calculates. + intersection: The out tree to record the intersection part. + """ + node = self._root + for name in path.split('.'): + if name not in node: + return + elif not node[name]: + intersection.AddPath(path) + return + node = node[name] + intersection.AddLeafNodes(path, node) + + def AddLeafNodes(self, prefix, node): + """Adds leaf nodes begin with prefix to this tree.""" + if not node: + self.AddPath(prefix) + for name in node: + child_path = prefix + '.' + name + self.AddLeafNodes(child_path, node[name]) + + def MergeMessage( + self, source, destination, + replace_message, replace_repeated): + """Merge all fields specified by this tree from source to destination.""" + _MergeMessage( + self._root, source, destination, replace_message, replace_repeated) + + +def _StrConvert(value): + """Converts value to str if it is not.""" + # This file is imported by c extension and some methods like ClearField + # requires string for the field name. py2/py3 has different text + # type and may use unicode. 
+ if not isinstance(value, str): + return value.encode('utf-8') + return value + + +def _MergeMessage( + node, source, destination, replace_message, replace_repeated): + """Merge all fields specified by a sub-tree from source to destination.""" + source_descriptor = source.DESCRIPTOR + for name in node: + child = node[name] + field = source_descriptor.fields_by_name[name] + if field is None: + raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( + name, source_descriptor.full_name)) + if child: + # Sub-paths are only allowed for singular message fields. + if (field.label == FieldDescriptor.LABEL_REPEATED or + field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): + raise ValueError('Error: Field {0} in message {1} is not a singular ' + 'message field and cannot have sub-fields.'.format( + name, source_descriptor.full_name)) + _MergeMessage( + child, getattr(source, name), getattr(destination, name), + replace_message, replace_repeated) + continue + if field.label == FieldDescriptor.LABEL_REPEATED: + if replace_repeated: + destination.ClearField(_StrConvert(name)) + repeated_source = getattr(source, name) + repeated_destination = getattr(destination, name) + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + for item in repeated_source: + repeated_destination.add().MergeFrom(item) + else: + repeated_destination.extend(repeated_source) + else: + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + if replace_message: + destination.ClearField(_StrConvert(name)) + if source.HasField(name): + getattr(destination, name).MergeFrom(getattr(source, name)) + else: + setattr(destination, name, getattr(source, name)) + + +def _AddFieldPaths(node, prefix, field_mask): + """Adds the field paths descended from node to field_mask.""" + if not node: + field_mask.paths.append(prefix) + return + for name in sorted(node): + if prefix: + child_path = prefix + '.' 
+ name + else: + child_path = name + _AddFieldPaths(node[name], child_path, field_mask) + + +WKTBASES = { + 'google.protobuf.Any': Any, + 'google.protobuf.Duration': Duration, + 'google.protobuf.FieldMask': FieldMask, + 'google.protobuf.Timestamp': Timestamp, +} diff --git a/deps/google/protobuf/internal/well_known_types_test.py b/deps/google/protobuf/internal/well_known_types_test.py new file mode 100644 index 00000000..0e31e6f8 --- /dev/null +++ b/deps/google/protobuf/internal/well_known_types_test.py @@ -0,0 +1,513 @@ +#! /usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
class TimeUtilTestBase(unittest.TestCase):
  """Shared assertion helpers for Timestamp/Duration JSON round-trip tests."""

  def CheckTimestampConversion(self, message, text):
    """Asserts that message serializes to text and text parses back to it."""
    self.assertEqual(text, message.ToJsonString())
    reparsed = timestamp_pb2.Timestamp()
    reparsed.FromJsonString(text)
    self.assertEqual(message, reparsed)

  def CheckDurationConversion(self, message, text):
    """Asserts that message serializes to text and text parses back to it."""
    self.assertEqual(text, message.ToJsonString())
    reparsed = duration_pb2.Duration()
    reparsed.FromJsonString(text)
    self.assertEqual(message, reparsed)
+ message.seconds = 0 + message.nanos = 0 + self.CheckTimestampConversion(message, '1970-01-01T00:00:00Z') + message.nanos = 10000000 + self.CheckTimestampConversion(message, '1970-01-01T00:00:00.010Z') + message.nanos = 10000 + self.CheckTimestampConversion(message, '1970-01-01T00:00:00.000010Z') + message.nanos = 10 + self.CheckTimestampConversion(message, '1970-01-01T00:00:00.000000010Z') + # Test min timestamps. + message.seconds = -62135596800 + message.nanos = 0 + self.CheckTimestampConversion(message, '0001-01-01T00:00:00Z') + # Test max timestamps. + message.seconds = 253402300799 + message.nanos = 999999999 + self.CheckTimestampConversion(message, '9999-12-31T23:59:59.999999999Z') + # Test negative timestamps. + message.seconds = -1 + self.CheckTimestampConversion(message, '1969-12-31T23:59:59.999999999Z') + + # Parsing accepts an fractional digits as long as they fit into nano + # precision. + message.FromJsonString('1970-01-01T00:00:00.1Z') + self.assertEqual(0, message.seconds) + self.assertEqual(100000000, message.nanos) + # Parsing accpets offsets. + message.FromJsonString('1970-01-01T00:00:00-08:00') + self.assertEqual(8 * 3600, message.seconds) + self.assertEqual(0, message.nanos) + + def testDurationSerializeAndParse(self): + message = duration_pb2.Duration() + # Generated output should contain 3, 6, or 9 fractional digits. 
+ message.seconds = 0 + message.nanos = 0 + self.CheckDurationConversion(message, '0s') + message.nanos = 10000000 + self.CheckDurationConversion(message, '0.010s') + message.nanos = 10000 + self.CheckDurationConversion(message, '0.000010s') + message.nanos = 10 + self.CheckDurationConversion(message, '0.000000010s') + + # Test min and max + message.seconds = 315576000000 + message.nanos = 999999999 + self.CheckDurationConversion(message, '315576000000.999999999s') + message.seconds = -315576000000 + message.nanos = -999999999 + self.CheckDurationConversion(message, '-315576000000.999999999s') + + # Parsing accepts an fractional digits as long as they fit into nano + # precision. + message.FromJsonString('0.1s') + self.assertEqual(100000000, message.nanos) + message.FromJsonString('0.0000001s') + self.assertEqual(100, message.nanos) + + def testTimestampIntegerConversion(self): + message = timestamp_pb2.Timestamp() + message.FromNanoseconds(1) + self.assertEqual('1970-01-01T00:00:00.000000001Z', + message.ToJsonString()) + self.assertEqual(1, message.ToNanoseconds()) + + message.FromNanoseconds(-1) + self.assertEqual('1969-12-31T23:59:59.999999999Z', + message.ToJsonString()) + self.assertEqual(-1, message.ToNanoseconds()) + + message.FromMicroseconds(1) + self.assertEqual('1970-01-01T00:00:00.000001Z', + message.ToJsonString()) + self.assertEqual(1, message.ToMicroseconds()) + + message.FromMicroseconds(-1) + self.assertEqual('1969-12-31T23:59:59.999999Z', + message.ToJsonString()) + self.assertEqual(-1, message.ToMicroseconds()) + + message.FromMilliseconds(1) + self.assertEqual('1970-01-01T00:00:00.001Z', + message.ToJsonString()) + self.assertEqual(1, message.ToMilliseconds()) + + message.FromMilliseconds(-1) + self.assertEqual('1969-12-31T23:59:59.999Z', + message.ToJsonString()) + self.assertEqual(-1, message.ToMilliseconds()) + + message.FromSeconds(1) + self.assertEqual('1970-01-01T00:00:01Z', + message.ToJsonString()) + self.assertEqual(1, 
message.ToSeconds()) + + message.FromSeconds(-1) + self.assertEqual('1969-12-31T23:59:59Z', + message.ToJsonString()) + self.assertEqual(-1, message.ToSeconds()) + + message.FromNanoseconds(1999) + self.assertEqual(1, message.ToMicroseconds()) + # For negative values, Timestamp will be rounded down. + # For example, "1969-12-31T23:59:59.5Z" (i.e., -0.5s) rounded to seconds + # will be "1969-12-31T23:59:59Z" (i.e., -1s) rather than + # "1970-01-01T00:00:00Z" (i.e., 0s). + message.FromNanoseconds(-1999) + self.assertEqual(-2, message.ToMicroseconds()) + + def testDurationIntegerConversion(self): + message = duration_pb2.Duration() + message.FromNanoseconds(1) + self.assertEqual('0.000000001s', + message.ToJsonString()) + self.assertEqual(1, message.ToNanoseconds()) + + message.FromNanoseconds(-1) + self.assertEqual('-0.000000001s', + message.ToJsonString()) + self.assertEqual(-1, message.ToNanoseconds()) + + message.FromMicroseconds(1) + self.assertEqual('0.000001s', + message.ToJsonString()) + self.assertEqual(1, message.ToMicroseconds()) + + message.FromMicroseconds(-1) + self.assertEqual('-0.000001s', + message.ToJsonString()) + self.assertEqual(-1, message.ToMicroseconds()) + + message.FromMilliseconds(1) + self.assertEqual('0.001s', + message.ToJsonString()) + self.assertEqual(1, message.ToMilliseconds()) + + message.FromMilliseconds(-1) + self.assertEqual('-0.001s', + message.ToJsonString()) + self.assertEqual(-1, message.ToMilliseconds()) + + message.FromSeconds(1) + self.assertEqual('1s', message.ToJsonString()) + self.assertEqual(1, message.ToSeconds()) + + message.FromSeconds(-1) + self.assertEqual('-1s', + message.ToJsonString()) + self.assertEqual(-1, message.ToSeconds()) + + # Test truncation behavior. + message.FromNanoseconds(1999) + self.assertEqual(1, message.ToMicroseconds()) + + # For negative values, Duration will be rounded towards 0. 
+ message.FromNanoseconds(-1999) + self.assertEqual(-1, message.ToMicroseconds()) + + def testDatetimeConverison(self): + message = timestamp_pb2.Timestamp() + dt = datetime(1970, 1, 1) + message.FromDatetime(dt) + self.assertEqual(dt, message.ToDatetime()) + + message.FromMilliseconds(1999) + self.assertEqual(datetime(1970, 1, 1, 0, 0, 1, 999000), + message.ToDatetime()) + + def testTimedeltaConversion(self): + message = duration_pb2.Duration() + message.FromNanoseconds(1999999999) + td = message.ToTimedelta() + self.assertEqual(1, td.seconds) + self.assertEqual(999999, td.microseconds) + + message.FromNanoseconds(-1999999999) + td = message.ToTimedelta() + self.assertEqual(-1, td.days) + self.assertEqual(86398, td.seconds) + self.assertEqual(1, td.microseconds) + + message.FromMicroseconds(-1) + td = message.ToTimedelta() + self.assertEqual(-1, td.days) + self.assertEqual(86399, td.seconds) + self.assertEqual(999999, td.microseconds) + converted_message = duration_pb2.Duration() + converted_message.FromTimedelta(td) + self.assertEqual(message, converted_message) + + def testInvalidTimestamp(self): + message = timestamp_pb2.Timestamp() + self.assertRaisesRegexp( + ValueError, + 'time data \'10000-01-01T00:00:00\' does not match' + ' format \'%Y-%m-%dT%H:%M:%S\'', + message.FromJsonString, '10000-01-01T00:00:00.00Z') + self.assertRaisesRegexp( + well_known_types.ParseError, + 'nanos 0123456789012 more than 9 fractional digits.', + message.FromJsonString, + '1970-01-01T00:00:00.0123456789012Z') + self.assertRaisesRegexp( + well_known_types.ParseError, + (r'Invalid timezone offset value: \+08.'), + message.FromJsonString, + '1972-01-01T01:00:00.01+08',) + self.assertRaisesRegexp( + ValueError, + 'year is out of range', + message.FromJsonString, + '0000-01-01T00:00:00Z') + message.seconds = 253402300800 + self.assertRaisesRegexp( + OverflowError, + 'date value out of range', + message.ToJsonString) + + def testInvalidDuration(self): + message = duration_pb2.Duration() 
+ self.assertRaisesRegexp( + well_known_types.ParseError, + 'Duration must end with letter "s": 1.', + message.FromJsonString, '1') + self.assertRaisesRegexp( + well_known_types.ParseError, + 'Couldn\'t parse duration: 1...2s.', + message.FromJsonString, '1...2s') + + +class FieldMaskTest(unittest.TestCase): + + def testStringFormat(self): + mask = field_mask_pb2.FieldMask() + self.assertEqual('', mask.ToJsonString()) + mask.paths.append('foo') + self.assertEqual('foo', mask.ToJsonString()) + mask.paths.append('bar') + self.assertEqual('foo,bar', mask.ToJsonString()) + + mask.FromJsonString('') + self.assertEqual('', mask.ToJsonString()) + mask.FromJsonString('foo') + self.assertEqual(['foo'], mask.paths) + mask.FromJsonString('foo,bar') + self.assertEqual(['foo', 'bar'], mask.paths) + + def testDescriptorToFieldMask(self): + mask = field_mask_pb2.FieldMask() + msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR + mask.AllFieldsFromDescriptor(msg_descriptor) + self.assertEqual(75, len(mask.paths)) + self.assertTrue(mask.IsValidForDescriptor(msg_descriptor)) + for field in msg_descriptor.fields: + self.assertTrue(field.name in mask.paths) + mask.paths.append('optional_nested_message.bb') + self.assertTrue(mask.IsValidForDescriptor(msg_descriptor)) + mask.paths.append('repeated_nested_message.bb') + self.assertFalse(mask.IsValidForDescriptor(msg_descriptor)) + + def testCanonicalFrom(self): + mask = field_mask_pb2.FieldMask() + out_mask = field_mask_pb2.FieldMask() + # Paths will be sorted. + mask.FromJsonString('baz.quz,bar,foo') + out_mask.CanonicalFormFromMask(mask) + self.assertEqual('bar,baz.quz,foo', out_mask.ToJsonString()) + # Duplicated paths will be removed. + mask.FromJsonString('foo,bar,foo') + out_mask.CanonicalFormFromMask(mask) + self.assertEqual('bar,foo', out_mask.ToJsonString()) + # Sub-paths of other paths will be removed. 
+ mask.FromJsonString('foo.b1,bar.b1,foo.b2,bar') + out_mask.CanonicalFormFromMask(mask) + self.assertEqual('bar,foo.b1,foo.b2', out_mask.ToJsonString()) + + # Test more deeply nested cases. + mask.FromJsonString( + 'foo.bar.baz1,foo.bar.baz2.quz,foo.bar.baz2') + out_mask.CanonicalFormFromMask(mask) + self.assertEqual('foo.bar.baz1,foo.bar.baz2', + out_mask.ToJsonString()) + mask.FromJsonString( + 'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz') + out_mask.CanonicalFormFromMask(mask) + self.assertEqual('foo.bar.baz1,foo.bar.baz2', + out_mask.ToJsonString()) + mask.FromJsonString( + 'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo.bar') + out_mask.CanonicalFormFromMask(mask) + self.assertEqual('foo.bar', out_mask.ToJsonString()) + mask.FromJsonString( + 'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo') + out_mask.CanonicalFormFromMask(mask) + self.assertEqual('foo', out_mask.ToJsonString()) + + def testUnion(self): + mask1 = field_mask_pb2.FieldMask() + mask2 = field_mask_pb2.FieldMask() + out_mask = field_mask_pb2.FieldMask() + mask1.FromJsonString('foo,baz') + mask2.FromJsonString('bar,quz') + out_mask.Union(mask1, mask2) + self.assertEqual('bar,baz,foo,quz', out_mask.ToJsonString()) + # Overlap with duplicated paths. + mask1.FromJsonString('foo,baz.bb') + mask2.FromJsonString('baz.bb,quz') + out_mask.Union(mask1, mask2) + self.assertEqual('baz.bb,foo,quz', out_mask.ToJsonString()) + # Overlap with paths covering some other paths. + mask1.FromJsonString('foo.bar.baz,quz') + mask2.FromJsonString('foo.bar,bar') + out_mask.Union(mask1, mask2) + self.assertEqual('bar,foo.bar,quz', out_mask.ToJsonString()) + + def testIntersect(self): + mask1 = field_mask_pb2.FieldMask() + mask2 = field_mask_pb2.FieldMask() + out_mask = field_mask_pb2.FieldMask() + # Test cases without overlapping. + mask1.FromJsonString('foo,baz') + mask2.FromJsonString('bar,quz') + out_mask.Intersect(mask1, mask2) + self.assertEqual('', out_mask.ToJsonString()) + # Overlap with duplicated paths. 
+ mask1.FromJsonString('foo,baz.bb') + mask2.FromJsonString('baz.bb,quz') + out_mask.Intersect(mask1, mask2) + self.assertEqual('baz.bb', out_mask.ToJsonString()) + # Overlap with paths covering some other paths. + mask1.FromJsonString('foo.bar.baz,quz') + mask2.FromJsonString('foo.bar,bar') + out_mask.Intersect(mask1, mask2) + self.assertEqual('foo.bar.baz', out_mask.ToJsonString()) + mask1.FromJsonString('foo.bar,bar') + mask2.FromJsonString('foo.bar.baz,quz') + out_mask.Intersect(mask1, mask2) + self.assertEqual('foo.bar.baz', out_mask.ToJsonString()) + + def testMergeMessage(self): + # Test merge one field. + src = unittest_pb2.TestAllTypes() + test_util.SetAllFields(src) + for field in src.DESCRIPTOR.fields: + if field.containing_oneof: + continue + field_name = field.name + dst = unittest_pb2.TestAllTypes() + # Only set one path to mask. + mask = field_mask_pb2.FieldMask() + mask.paths.append(field_name) + mask.MergeMessage(src, dst) + # The expected result message. + msg = unittest_pb2.TestAllTypes() + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + repeated_src = getattr(src, field_name) + repeated_msg = getattr(msg, field_name) + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + for item in repeated_src: + repeated_msg.add().CopyFrom(item) + else: + repeated_msg.extend(repeated_src) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + getattr(msg, field_name).CopyFrom(getattr(src, field_name)) + else: + setattr(msg, field_name, getattr(src, field_name)) + # Only field specified in mask is merged. + self.assertEqual(msg, dst) + + # Test merge nested fields. 
+ nested_src = unittest_pb2.NestedTestAllTypes() + nested_dst = unittest_pb2.NestedTestAllTypes() + nested_src.child.payload.optional_int32 = 1234 + nested_src.child.child.payload.optional_int32 = 5678 + mask = field_mask_pb2.FieldMask() + mask.FromJsonString('child.payload') + mask.MergeMessage(nested_src, nested_dst) + self.assertEqual(1234, nested_dst.child.payload.optional_int32) + self.assertEqual(0, nested_dst.child.child.payload.optional_int32) + + mask.FromJsonString('child.child.payload') + mask.MergeMessage(nested_src, nested_dst) + self.assertEqual(1234, nested_dst.child.payload.optional_int32) + self.assertEqual(5678, nested_dst.child.child.payload.optional_int32) + + nested_dst.Clear() + mask.FromJsonString('child.child.payload') + mask.MergeMessage(nested_src, nested_dst) + self.assertEqual(0, nested_dst.child.payload.optional_int32) + self.assertEqual(5678, nested_dst.child.child.payload.optional_int32) + + nested_dst.Clear() + mask.FromJsonString('child') + mask.MergeMessage(nested_src, nested_dst) + self.assertEqual(1234, nested_dst.child.payload.optional_int32) + self.assertEqual(5678, nested_dst.child.child.payload.optional_int32) + + # Test MergeOptions. + nested_dst.Clear() + nested_dst.child.payload.optional_int64 = 4321 + # Message fields will be merged by default. + mask.FromJsonString('child.payload') + mask.MergeMessage(nested_src, nested_dst) + self.assertEqual(1234, nested_dst.child.payload.optional_int32) + self.assertEqual(4321, nested_dst.child.payload.optional_int64) + # Change the behavior to replace message fields. + mask.FromJsonString('child.payload') + mask.MergeMessage(nested_src, nested_dst, True, False) + self.assertEqual(1234, nested_dst.child.payload.optional_int32) + self.assertEqual(0, nested_dst.child.payload.optional_int64) + + # By default, fields missing in source are not cleared in destination. 
+ nested_dst.payload.optional_int32 = 1234 + self.assertTrue(nested_dst.HasField('payload')) + mask.FromJsonString('payload') + mask.MergeMessage(nested_src, nested_dst) + self.assertTrue(nested_dst.HasField('payload')) + # But they are cleared when replacing message fields. + nested_dst.Clear() + nested_dst.payload.optional_int32 = 1234 + mask.FromJsonString('payload') + mask.MergeMessage(nested_src, nested_dst, True, False) + self.assertFalse(nested_dst.HasField('payload')) + + nested_src.payload.repeated_int32.append(1234) + nested_dst.payload.repeated_int32.append(5678) + # Repeated fields will be appended by default. + mask.FromJsonString('payload.repeated_int32') + mask.MergeMessage(nested_src, nested_dst) + self.assertEqual(2, len(nested_dst.payload.repeated_int32)) + self.assertEqual(5678, nested_dst.payload.repeated_int32[0]) + self.assertEqual(1234, nested_dst.payload.repeated_int32[1]) + # Change the behavior to replace repeated fields. + mask.FromJsonString('payload.repeated_int32') + mask.MergeMessage(nested_src, nested_dst, False, True) + self.assertEqual(1, len(nested_dst.payload.repeated_int32)) + self.assertEqual(1234, nested_dst.payload.repeated_int32[0]) + +if __name__ == '__main__': + unittest.main() diff --git a/deps/google/protobuf/internal/wire_format.py b/deps/google/protobuf/internal/wire_format.py new file mode 100644 index 00000000..883f5255 --- /dev/null +++ b/deps/google/protobuf/internal/wire_format.py @@ -0,0 +1,268 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Constants and static functions to support protocol buffer wire format.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import struct +from google.protobuf import descriptor +from google.protobuf import message + + +TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag. +TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7 + +# These numbers identify the wire type of a protocol buffer value. +# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded +# tag-and-type to store one of these WIRETYPE_* constants. +# These values must match WireType enum in google/protobuf/wire_format.h. 
# These numbers identify the wire type of a protocol buffer value.
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
# tag-and-type to store one of these WIRETYPE_* constants.
# These values must match WireType enum in google/protobuf/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5


# Bounds for various integer types.
INT32_MAX = int((1 << 31) - 1)
INT32_MIN = int(-(1 << 31))
UINT32_MAX = (1 << 32) - 1

INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1

# "struct" format strings that will encode/decode the specified formats.
# NOTE(review): the text from here through UnpackTag was garbled in this copy
# (everything between a '<' and the next '>' was swallowed, leaving an
# unterminated string literal).  The constants, the calcsize sanity checks
# and PackTag/UnpackTag below are reconstructed from the upstream protobuf
# source for this file.
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'


# We'll have to provide alternate implementations of AppendLittleEndian*() on
# any architectures where these checks fail.
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
  raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
  raise AssertionError('Format "Q" is not a 64-bit number.')


def PackTag(field_number, wire_type):
  """Returns an unsigned 32-bit integer that encodes the field number and
  wire type information in standard protocol message wire format.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29)
    wire_type: One of the WIRETYPE_* constants.

  Raises:
    message.EncodeError: If wire_type is outside [0, _WIRETYPE_MAX].
  """
  if not 0 <= wire_type <= _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  return (field_number << TAG_TYPE_BITS) | wire_type


def UnpackTag(tag):
  """The inverse of PackTag().  Given an unsigned 32-bit number,
  returns a (field_number, wire_type) tuple.
  """
  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)


def ZigZagEncode(value):
  """ZigZag Transform: Encodes signed integers so that they can be
  effectively used with varint encoding.  See wire_format.h for
  more details.
  """
  if value >= 0:
    return value << 1
  return (value << 1) ^ (~0)


def ZigZagDecode(value):
  """Inverse of ZigZagEncode()."""
  if not value & 0x1:
    return value >> 1
  return (value >> 1) ^ (~0)



# The *ByteSize() functions below return the number of bytes required to
# serialize "field number + type" information and then serialize the value.


def Int32ByteSize(field_number, int32):
  return Int64ByteSize(field_number, int32)


def Int32ByteSizeNoTag(int32):
  # Sign-extend to the 64-bit two's-complement form used on the wire.
  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)


def Int64ByteSize(field_number, int64):
  # Have to convert to uint before calling UInt64ByteSize().
  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)


def UInt32ByteSize(field_number, uint32):
  return UInt64ByteSize(field_number, uint32)


def UInt64ByteSize(field_number, uint64):
  return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)


def SInt32ByteSize(field_number, int32):
  return UInt32ByteSize(field_number, ZigZagEncode(int32))


def SInt64ByteSize(field_number, int64):
  return UInt64ByteSize(field_number, ZigZagEncode(int64))


def Fixed32ByteSize(field_number, fixed32):
  return TagByteSize(field_number) + 4


def Fixed64ByteSize(field_number, fixed64):
  return TagByteSize(field_number) + 8


def SFixed32ByteSize(field_number, sfixed32):
  return TagByteSize(field_number) + 4


def SFixed64ByteSize(field_number, sfixed64):
  return TagByteSize(field_number) + 8


def FloatByteSize(field_number, flt):
  return TagByteSize(field_number) + 4


def DoubleByteSize(field_number, double):
  return TagByteSize(field_number) + 8


def BoolByteSize(field_number, b):
  return TagByteSize(field_number) + 1


def EnumByteSize(field_number, enum):
  return UInt32ByteSize(field_number, enum)


def StringByteSize(field_number, string):
  # Strings are measured by their UTF-8 encoded length, like on the wire.
  return BytesByteSize(field_number, string.encode('utf-8'))


def BytesByteSize(field_number, b):
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(len(b))
          + len(b))


def GroupByteSize(field_number, message):
  return (2 * TagByteSize(field_number)  # START and END group.
          + message.ByteSize())


def MessageByteSize(field_number, message):
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(message.ByteSize())
          + message.ByteSize())


def MessageSetItemByteSize(field_number, msg):
  # First compute the sizes of the tags.
  # There are 2 tags for the beginning and ending of the repeated group, that
  # is field number 1, one with field number 2 (type_id) and one with field
  # number 3 (message).
  total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))

  # Add the number of bytes for type_id.
  total_size += _VarUInt64ByteSizeNoTag(field_number)

  message_size = msg.ByteSize()

  # The number of bytes for encoding the length of the message.
  total_size += _VarUInt64ByteSizeNoTag(message_size)

  # The size of the message.
  total_size += message_size
  return total_size
def TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number."""
  # Just pass in type 0, since the type won't affect the tag+type size.
  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))


# Private helper function for the *ByteSize() functions above.

def _VarUInt64ByteSizeNoTag(uint64):
  """Returns the number of bytes required to serialize a single varint
  using boundary value comparisons.  (unrolled loop optimization -WPierce)
  uint64 must be unsigned.
  """
  # Each varint byte carries 7 payload bits, so the size-k boundary is
  # (1 << 7*k) - 1.
  if uint64 <= (1 << 7) - 1: return 1
  if uint64 <= (1 << 14) - 1: return 2
  if uint64 <= (1 << 21) - 1: return 3
  if uint64 <= (1 << 28) - 1: return 4
  if uint64 <= (1 << 35) - 1: return 5
  if uint64 <= (1 << 42) - 1: return 6
  if uint64 <= (1 << 49) - 1: return 7
  if uint64 <= (1 << 56) - 1: return 8
  if uint64 <= (1 << 63) - 1: return 9
  if uint64 > UINT64_MAX:
    raise message.EncodeError('Value out of range: %d' % uint64)
  return 10


NON_PACKABLE_TYPES = (
    descriptor.FieldDescriptor.TYPE_STRING,
    descriptor.FieldDescriptor.TYPE_GROUP,
    descriptor.FieldDescriptor.TYPE_MESSAGE,
    descriptor.FieldDescriptor.TYPE_BYTES
)


def IsTypePackable(field_type):
  """Return true iff packable = true is valid for fields of this type.

  Args:
    field_type: a FieldDescriptor::Type value.

  Returns:
    True iff fields of this type are packable.
  """
  return field_type not in NON_PACKABLE_TYPES
+ +"""Test for google.protobuf.internal.wire_format.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +try: + import unittest2 as unittest +except ImportError: + import unittest +from google.protobuf import message +from google.protobuf.internal import wire_format + + +class WireFormatTest(unittest.TestCase): + + def testPackTag(self): + field_number = 0xabc + tag_type = 2 + self.assertEqual((field_number << 3) | tag_type, + wire_format.PackTag(field_number, tag_type)) + PackTag = wire_format.PackTag + # Number too high. + self.assertRaises(message.EncodeError, PackTag, field_number, 6) + # Number too low. + self.assertRaises(message.EncodeError, PackTag, field_number, -1) + + def testUnpackTag(self): + # Test field numbers that will require various varint sizes. + for expected_field_number in (1, 15, 16, 2047, 2048): + for expected_wire_type in range(6): # Highest-numbered wiretype is 5. + field_number, wire_type = wire_format.UnpackTag( + wire_format.PackTag(expected_field_number, expected_wire_type)) + self.assertEqual(expected_field_number, field_number) + self.assertEqual(expected_wire_type, wire_type) + + self.assertRaises(TypeError, wire_format.UnpackTag, None) + self.assertRaises(TypeError, wire_format.UnpackTag, 'abc') + self.assertRaises(TypeError, wire_format.UnpackTag, 0.0) + self.assertRaises(TypeError, wire_format.UnpackTag, object()) + + def testZigZagEncode(self): + Z = wire_format.ZigZagEncode + self.assertEqual(0, Z(0)) + self.assertEqual(1, Z(-1)) + self.assertEqual(2, Z(1)) + self.assertEqual(3, Z(-2)) + self.assertEqual(4, Z(2)) + self.assertEqual(0xfffffffe, Z(0x7fffffff)) + self.assertEqual(0xffffffff, Z(-0x80000000)) + self.assertEqual(0xfffffffffffffffe, Z(0x7fffffffffffffff)) + self.assertEqual(0xffffffffffffffff, Z(-0x8000000000000000)) + + self.assertRaises(TypeError, Z, None) + self.assertRaises(TypeError, Z, 'abcd') + self.assertRaises(TypeError, Z, 0.0) + self.assertRaises(TypeError, Z, object()) + + def 
  def testZigZagDecode(self):
    """ZigZagDecode must invert ZigZag encoding for 32- and 64-bit extremes."""
    Z = wire_format.ZigZagDecode
    self.assertEqual(0, Z(0))
    self.assertEqual(-1, Z(1))
    self.assertEqual(1, Z(2))
    self.assertEqual(-2, Z(3))
    self.assertEqual(2, Z(4))
    # Boundary values: largest/smallest representable 32- and 64-bit ints.
    self.assertEqual(0x7fffffff, Z(0xfffffffe))
    self.assertEqual(-0x80000000, Z(0xffffffff))
    self.assertEqual(0x7fffffffffffffff, Z(0xfffffffffffffffe))
    self.assertEqual(-0x8000000000000000, Z(0xffffffffffffffff))

    # Non-integral inputs must raise rather than be silently coerced.
    self.assertRaises(TypeError, Z, None)
    self.assertRaises(TypeError, Z, 'abcd')
    self.assertRaises(TypeError, Z, 0.0)
    self.assertRaises(TypeError, Z, object())

  def NumericByteSizeTestHelper(self, byte_size_fn, value, expected_value_size):
    """Asserts byte_size_fn(field_number, value) == tag size + expected_value_size.

    Args:
      byte_size_fn: One of the wire_format *ByteSize functions under test.
      value: The field value whose encoded size is being checked.
      expected_value_size: Expected byte size of the encoded value (no tag).
    """
    # Use field numbers that cause various byte sizes for the tag information.
    for field_number, tag_bytes in ((15, 1), (16, 2), (2047, 2), (2048, 3)):
      expected_size = expected_value_size + tag_bytes
      actual_size = byte_size_fn(field_number, value)
      self.assertEqual(expected_size, actual_size,
                       'byte_size_fn: %s, field_number: %d, value: %r\n'
                       'Expected: %d, Actual: %d'% (
          byte_size_fn, field_number, value, expected_size, actual_size))

  def testByteSizeFunctions(self):
    """Exercises every numeric, string, group, and message *ByteSize() helper."""
    # Test all numeric *ByteSize() functions.
    # Each row is [function, value, expected encoded size of the value alone].
    NUMERIC_ARGS = [
        # Int32ByteSize().
        [wire_format.Int32ByteSize, 0, 1],
        [wire_format.Int32ByteSize, 127, 1],
        [wire_format.Int32ByteSize, 128, 2],
        [wire_format.Int32ByteSize, -1, 10],
        # Int64ByteSize().
        [wire_format.Int64ByteSize, 0, 1],
        [wire_format.Int64ByteSize, 127, 1],
        [wire_format.Int64ByteSize, 128, 2],
        [wire_format.Int64ByteSize, -1, 10],
        # UInt32ByteSize().
        [wire_format.UInt32ByteSize, 0, 1],
        [wire_format.UInt32ByteSize, 127, 1],
        [wire_format.UInt32ByteSize, 128, 2],
        [wire_format.UInt32ByteSize, wire_format.UINT32_MAX, 5],
        # UInt64ByteSize().
        [wire_format.UInt64ByteSize, 0, 1],
        [wire_format.UInt64ByteSize, 127, 1],
        [wire_format.UInt64ByteSize, 128, 2],
        [wire_format.UInt64ByteSize, wire_format.UINT64_MAX, 10],
        # SInt32ByteSize().
        [wire_format.SInt32ByteSize, 0, 1],
        [wire_format.SInt32ByteSize, -1, 1],
        [wire_format.SInt32ByteSize, 1, 1],
        [wire_format.SInt32ByteSize, -63, 1],
        [wire_format.SInt32ByteSize, 63, 1],
        [wire_format.SInt32ByteSize, -64, 1],
        [wire_format.SInt32ByteSize, 64, 2],
        # SInt64ByteSize().
        [wire_format.SInt64ByteSize, 0, 1],
        [wire_format.SInt64ByteSize, -1, 1],
        [wire_format.SInt64ByteSize, 1, 1],
        [wire_format.SInt64ByteSize, -63, 1],
        [wire_format.SInt64ByteSize, 63, 1],
        [wire_format.SInt64ByteSize, -64, 1],
        [wire_format.SInt64ByteSize, 64, 2],
        # Fixed32ByteSize().
        [wire_format.Fixed32ByteSize, 0, 4],
        [wire_format.Fixed32ByteSize, wire_format.UINT32_MAX, 4],
        # Fixed64ByteSize().
        [wire_format.Fixed64ByteSize, 0, 8],
        [wire_format.Fixed64ByteSize, wire_format.UINT64_MAX, 8],
        # SFixed32ByteSize().
        [wire_format.SFixed32ByteSize, 0, 4],
        [wire_format.SFixed32ByteSize, wire_format.INT32_MIN, 4],
        [wire_format.SFixed32ByteSize, wire_format.INT32_MAX, 4],
        # SFixed64ByteSize().
        [wire_format.SFixed64ByteSize, 0, 8],
        [wire_format.SFixed64ByteSize, wire_format.INT64_MIN, 8],
        [wire_format.SFixed64ByteSize, wire_format.INT64_MAX, 8],
        # FloatByteSize().
        [wire_format.FloatByteSize, 0.0, 4],
        [wire_format.FloatByteSize, 1000000000.0, 4],
        [wire_format.FloatByteSize, -1000000000.0, 4],
        # DoubleByteSize().
        [wire_format.DoubleByteSize, 0.0, 8],
        [wire_format.DoubleByteSize, 1000000000.0, 8],
        [wire_format.DoubleByteSize, -1000000000.0, 8],
        # BoolByteSize().
        [wire_format.BoolByteSize, False, 1],
        [wire_format.BoolByteSize, True, 1],
        # EnumByteSize().
        [wire_format.EnumByteSize, 0, 1],
        [wire_format.EnumByteSize, 127, 1],
        [wire_format.EnumByteSize, 128, 2],
        [wire_format.EnumByteSize, wire_format.UINT32_MAX, 5],
        ]
    for args in NUMERIC_ARGS:
      self.NumericByteSizeTestHelper(*args)

    # Test strings and bytes.
    for byte_size_fn in (wire_format.StringByteSize, wire_format.BytesByteSize):
      # 1 byte for tag, 1 byte for length, 3 bytes for contents.
      self.assertEqual(5, byte_size_fn(10, 'abc'))
      # 2 bytes for tag, 1 byte for length, 3 bytes for contents.
      self.assertEqual(6, byte_size_fn(16, 'abc'))
      # 2 bytes for tag, 2 bytes for length, 128 bytes for contents.
      self.assertEqual(132, byte_size_fn(16, 'a' * 128))

    # Test UTF-8 string byte size calculation.
    # 1 byte for tag, 1 byte for length, 8 bytes for content.
    # (4 Cyrillic characters, each 2 bytes when UTF-8 encoded.)
    self.assertEqual(10, wire_format.StringByteSize(
        5, b'\xd0\xa2\xd0\xb5\xd1\x81\xd1\x82'.decode('utf-8')))

    class MockMessage(object):
      # Minimal stand-in exposing only the ByteSize() protocol the
      # group/message size helpers rely on.
      def __init__(self, byte_size):
        self.byte_size = byte_size
      def ByteSize(self):
        return self.byte_size

    message_byte_size = 10
    mock_message = MockMessage(byte_size=message_byte_size)
    # Test groups.
    # (2 * 1) bytes for begin and end tags, plus message_byte_size.
    self.assertEqual(2 + message_byte_size,
                     wire_format.GroupByteSize(1, mock_message))
    # (2 * 2) bytes for begin and end tags, plus message_byte_size.
    self.assertEqual(4 + message_byte_size,
                     wire_format.GroupByteSize(16, mock_message))

    # Test messages.
    # 1 byte for tag, plus 1 byte for length, plus contents.
    self.assertEqual(2 + mock_message.byte_size,
                     wire_format.MessageByteSize(1, mock_message))
    # 2 bytes for tag, plus 1 byte for length, plus contents.
    self.assertEqual(3 + mock_message.byte_size,
                     wire_format.MessageByteSize(16, mock_message))
    # 2 bytes for tag, plus 2 bytes for length, plus contents.
    mock_message.byte_size = 128
    self.assertEqual(4 + mock_message.byte_size,
                     wire_format.MessageByteSize(16, mock_message))


    # Test message set item byte size.
    # 4 bytes for tags, plus 1 byte for length, plus 1 byte for type_id,
    # plus contents.
    mock_message.byte_size = 10
    self.assertEqual(mock_message.byte_size + 6,
                     wire_format.MessageSetItemByteSize(1, mock_message))

    # 4 bytes for tags, plus 2 bytes for length, plus 1 byte for type_id,
    # plus contents.
    mock_message.byte_size = 128
    self.assertEqual(mock_message.byte_size + 7,
                     wire_format.MessageSetItemByteSize(1, mock_message))

    # 4 bytes for tags, plus 2 bytes for length, plus 2 byte for type_id,
    # plus contents.
    self.assertEqual(mock_message.byte_size + 8,
                     wire_format.MessageSetItemByteSize(128, mock_message))

    # Too-long varint.
    self.assertRaises(message.EncodeError,
                      wire_format.UInt64ByteSize, 1, 1 << 128)


if __name__ == '__main__':
  unittest.main()
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc.  All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Contains routines for printing protocol messages in JSON format.

Simple usage example:

  # Create a proto object and serialize it to a json format string.
  message = my_proto_pb2.MyMessage(foo='bar')
  json_string = json_format.MessageToJson(message)

  # Parse a json format string to proto object.
  message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
"""

__author__ = 'jieluo@google.com (Jie Luo)'

import base64
import json
import math
from six import text_type
import sys

from google.protobuf import descriptor

# NOTE(review): name misspells "FORMAT"; kept as-is in case external code
# references it.
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
# C++ type sets used to pick the JSON representation for numeric fields.
_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
                        descriptor.FieldDescriptor.CPPTYPE_UINT32,
                        descriptor.FieldDescriptor.CPPTYPE_INT64,
                        descriptor.FieldDescriptor.CPPTYPE_UINT64])
# 64-bit ints are emitted as JSON strings to avoid double-precision loss.
_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
                          descriptor.FieldDescriptor.CPPTYPE_UINT64])
_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
                          descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
# Proto3 JSON spellings for non-finite floats.
_INFINITY = 'Infinity'
_NEG_INFINITY = '-Infinity'
_NAN = 'NaN'


class Error(Exception):
  """Top-level module error for json_format."""


class SerializeToJsonError(Error):
  """Thrown if serialization to JSON fails."""


class ParseError(Error):
  """Thrown in case of parsing error."""


def MessageToJson(message, including_default_value_fields=False):
  """Converts protobuf message to JSON format.

  Args:
    message: The protocol buffers message instance to serialize.
    including_default_value_fields: If True, singular primitive fields,
        repeated fields, and map fields will always be serialized.  If
        False, only serialize non-empty fields.  Singular message fields
        and oneof fields are not affected by this option.

  Returns:
    A string containing the JSON formatted protocol buffer message.
  """
  js = _MessageToJsonObject(message, including_default_value_fields)
  return json.dumps(js, indent=2)


def _MessageToJsonObject(message, including_default_value_fields):
  """Converts message to an object according to Proto3 JSON Specification."""
  message_descriptor = message.DESCRIPTOR
  # Well-known types (Timestamp, Duration, ...) define their own JSON form.
  if hasattr(message, 'ToJsonString'):
    return message.ToJsonString()
  if _IsWrapperMessage(message_descriptor):
    return _WrapperMessageToJsonObject(message)
  return _RegularMessageToJsonObject(message, including_default_value_fields)


def _IsMapEntry(field):
  """Returns True if the field is a synthesized map<K, V> entry message."""
  return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
          field.message_type.has_options and
          field.message_type.GetOptions().map_entry)


def _RegularMessageToJsonObject(message, including_default_value_fields):
  """Converts normal message according to Proto3 JSON Specification.

  Raises:
    SerializeToJsonError: If a field value cannot be converted.
  """
  js = {}
  fields = message.ListFields()
  include_default = including_default_value_fields

  try:
    for field, value in fields:
      name = field.camelcase_name
      if _IsMapEntry(field):
        # Convert a map field.  JSON object keys are always strings, so
        # bool keys are spelled out as 'true'/'false'.
        v_field = field.message_type.fields_by_name['value']
        js_map = {}
        for key in value:
          if isinstance(key, bool):
            if key:
              recorded_key = 'true'
            else:
              recorded_key = 'false'
          else:
            recorded_key = key
          js_map[recorded_key] = _FieldToJsonObject(
              v_field, value[key], including_default_value_fields)
        js[name] = js_map
      elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
        # Convert a repeated field.
        js[name] = [_FieldToJsonObject(field, k, include_default)
                    for k in value]
      else:
        js[name] = _FieldToJsonObject(field, value, include_default)

    # Serialize default value if including_default_value_fields is True.
    if including_default_value_fields:
      message_descriptor = message.DESCRIPTOR
      for field in message_descriptor.fields:
        # Singular message fields and oneof fields will not be affected.
        if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and
             field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
            field.containing_oneof):
          continue
        name = field.camelcase_name
        if name in js:
          # Skip the field which has been serialized already.
          continue
        if _IsMapEntry(field):
          js[name] = {}
        elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
          js[name] = []
        else:
          js[name] = _FieldToJsonObject(field, field.default_value)

  except ValueError as e:
    raise SerializeToJsonError(
        'Failed to serialize {0} field: {1}.'.format(field.name, e))

  return js


def _FieldToJsonObject(
    field, value, including_default_value_fields=False):
  """Converts a single field value according to Proto3 JSON Specification.

  Raises:
    SerializeToJsonError: If an enum value has no named mapping.
  """
  if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
    return _MessageToJsonObject(value, including_default_value_fields)
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
    enum_value = field.enum_type.values_by_number.get(value, None)
    if enum_value is not None:
      return enum_value.name
    else:
      raise SerializeToJsonError('Enum field contains an integer value '
                                 'which can not mapped to an enum value.')
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
    if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
      # Use base64 Data encoding for bytes
      return base64.b64encode(value).decode('utf-8')
    else:
      return value
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
    return bool(value)
  elif field.cpp_type in _INT64_TYPES:
    # 64-bit ints become strings so JavaScript consumers don't lose precision.
    return str(value)
  elif field.cpp_type in _FLOAT_TYPES:
    # Non-finite floats use the proto3 JSON string spellings.
    if math.isinf(value):
      if value < 0.0:
        return _NEG_INFINITY
      else:
        return _INFINITY
    if math.isnan(value):
      return _NAN
  # Finite floats and 32-bit ints pass through as plain JSON numbers.
  return value


def _IsWrapperMessage(message_descriptor):
  """Returns True for google.protobuf.*Value wrapper message types."""
  return message_descriptor.file.name == 'google/protobuf/wrappers.proto'


def _WrapperMessageToJsonObject(message):
  """Serializes a wrapper message as the JSON form of its 'value' field."""
  return _FieldToJsonObject(
      message.DESCRIPTOR.fields_by_name['value'], message.value)


def _DuplicateChecker(js):
  """object_pairs_hook that rejects duplicate keys in a JSON object."""
  result = {}
  for name, value in js:
    if name in result:
      raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
    result[name] = value
  return result


def Parse(text, message):
  """Parses a JSON representation of a protocol message into a message.

  Args:
    text: Message JSON representation.
    message: A protocol buffer message to merge into.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On JSON parsing problems.
  """
  if not isinstance(text, text_type):
    text = text.decode('utf-8')
  try:
    if sys.version_info < (2, 7):
      # object_pair_hook is not supported before python2.7
      js = json.loads(text)
    else:
      js = json.loads(text, object_pairs_hook=_DuplicateChecker)
  except ValueError as e:
    raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
  _ConvertFieldValuePair(js, message)
  return message


def _ConvertFieldValuePair(js, message):
  """Convert field value pairs into regular message.

  Args:
    js: A JSON object to convert the field value pairs.
    message: A regular protocol message to record the data.

  Raises:
    ParseError: In case of problems converting.
  """
  names = []
  message_descriptor = message.DESCRIPTOR
  for name in js:
    try:
      field = message_descriptor.fields_by_camelcase_name.get(name, None)
      if not field:
        raise ParseError(
            'Message type "{0}" has no field named "{1}".'.format(
                message_descriptor.full_name, name))
      if name in names:
        raise ParseError(
            'Message type "{0}" should not have multiple "{1}" fields.'.format(
                message.DESCRIPTOR.full_name, name))
      names.append(name)
      # Check no other oneof field is parsed.
      if field.containing_oneof is not None:
        oneof_name = field.containing_oneof.name
        if oneof_name in names:
          raise ParseError('Message type "{0}" should not have multiple "{1}" '
                           'oneof fields.'.format(
                               message.DESCRIPTOR.full_name, oneof_name))
        names.append(oneof_name)

      value = js[name]
      if value is None:
        # JSON null clears the field.
        message.ClearField(field.name)
        continue

      # Parse field value.
      if _IsMapEntry(field):
        message.ClearField(field.name)
        _ConvertMapFieldValue(value, message, field)
      elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
        message.ClearField(field.name)
        if not isinstance(value, list):
          raise ParseError('repeated field {0} must be in [] which is '
                           '{1}.'.format(name, value))
        for item in value:
          # Null elements inside a repeated field are skipped.
          if item is None:
            continue
          if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
            sub_message = getattr(message, field.name).add()
            _ConvertMessage(item, sub_message)
          else:
            getattr(message, field.name).append(
                _ConvertScalarFieldValue(item, field))
      elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        sub_message = getattr(message, field.name)
        _ConvertMessage(value, sub_message)
      else:
        setattr(message, field.name, _ConvertScalarFieldValue(value, field))
    except ParseError as e:
      # Avoid double-prefixing errors already raised for nested fields.
      if field and field.containing_oneof is None:
        raise ParseError('Failed to parse {0} field: {1}'.format(name, e))
      else:
        raise ParseError(str(e))
    except ValueError as e:
      raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
    except TypeError as e:
      raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))


def _ConvertMessage(value, message):
  """Convert a JSON object into a message.

  Args:
    value: A JSON object.
    message: A WKT or regular protocol message to record the data.

  Raises:
    ParseError: In case of convert problems.
  """
  message_descriptor = message.DESCRIPTOR
  if hasattr(message, 'FromJsonString'):
    # Well-known types parse their own JSON form.
    message.FromJsonString(value)
  elif _IsWrapperMessage(message_descriptor):
    _ConvertWrapperMessage(value, message)
  else:
    _ConvertFieldValuePair(value, message)


def _ConvertWrapperMessage(value, message):
  """Convert a JSON representation into Wrapper message."""
  field = message.DESCRIPTOR.fields_by_name['value']
  setattr(message, 'value', _ConvertScalarFieldValue(value, field))


def _ConvertMapFieldValue(value, message, field):
  """Convert map field value for a message map field.

  Args:
    value: A JSON object to convert the map field value.
    message: A protocol message to record the converted data.
    field: The descriptor of the map field to be converted.

  Raises:
    ParseError: In case of convert problems.
  """
  if not isinstance(value, dict):
    # BUG FIX: the original format string mixed manual ({0}/{1}) and
    # automatic ({}) field numbering, which makes str.format raise
    # ValueError instead of producing this ParseError; the literal braces
    # are now escaped (and the "fieled" typo fixed).
    raise ParseError(
        'Map field {0} must be in {{}} which is {1}.'.format(
            field.name, value))
  key_field = field.message_type.fields_by_name['key']
  value_field = field.message_type.fields_by_name['value']
  for key in value:
    # JSON object keys are strings, so scalar key parsing requires str input.
    key_value = _ConvertScalarFieldValue(key, key_field, True)
    if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
      _ConvertMessage(value[key], getattr(message, field.name)[key_value])
    else:
      getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
          value[key], value_field)


def _ConvertScalarFieldValue(value, field, require_str=False):
  """Convert a single scalar field value.

  Args:
    value: A scalar value to convert the scalar field value.
    field: The descriptor of the field to convert.
    require_str: If True, the field value must be a str.

  Returns:
    The converted scalar field value

  Raises:
    ParseError: In case of convert problems.
  """
  if field.cpp_type in _INT_TYPES:
    return _ConvertInteger(value)
  elif field.cpp_type in _FLOAT_TYPES:
    return _ConvertFloat(value)
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
    return _ConvertBool(value, require_str)
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
    if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
      return base64.b64decode(value)
    else:
      return value
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
    # Convert an enum value.
    enum_value = field.enum_type.values_by_name.get(value, None)
    if enum_value is None:
      raise ParseError(
          'Enum value must be a string literal with double quotes. '
          'Type "{0}" has no value named {1}.'.format(
              field.enum_type.full_name, value))
    return enum_value.number


def _ConvertInteger(value):
  """Convert an integer.

  Args:
    value: A scalar value to convert.

  Returns:
    The integer value.

  Raises:
    ParseError: If an integer couldn't be consumed.
  """
  # Floats are rejected outright (even integral ones) per proto3 JSON rules.
  if isinstance(value, float):
    raise ParseError('Couldn\'t parse integer: {0}.'.format(value))

  if isinstance(value, text_type) and value.find(' ') != -1:
    raise ParseError('Couldn\'t parse integer: "{0}".'.format(value))

  return int(value)


def _ConvertFloat(value):
  """Convert a floating point number."""
  # Only the capitalized proto3 spelling 'NaN' is accepted.
  if value == 'nan':
    raise ParseError('Couldn\'t parse float "nan", use "NaN" instead.')
  try:
    # Assume Python compatible syntax.
    return float(value)
  except ValueError:
    # Check alternative spellings.
    if value == _NEG_INFINITY:
      return float('-inf')
    elif value == _INFINITY:
      return float('inf')
    elif value == _NAN:
      return float('nan')
    else:
      raise ParseError('Couldn\'t parse float: {0}.'.format(value))


def _ConvertBool(value, require_str):
  """Convert a boolean value.

  Args:
    value: A scalar value to convert.
    require_str: If True, value must be a str.

  Returns:
    The bool parsed.

  Raises:
    ParseError: If a boolean value couldn't be consumed.
  """
  if require_str:
    if value == 'true':
      return True
    elif value == 'false':
      return False
    else:
      raise ParseError('Expected "true" or "false", not {0}.'.format(value))

  if not isinstance(value, bool):
    raise ParseError('Expected true or false without quotes.')
  return value
+ + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + if require_str: + if value == 'true': + return True + elif value == 'false': + return False + else: + raise ParseError('Expected "true" or "false", not {0}.'.format(value)) + + if not isinstance(value, bool): + raise ParseError('Expected true or false without quotes.') + return value diff --git a/deps/google/protobuf/map_unittest_pb2.py b/deps/google/protobuf/map_unittest_pb2.py new file mode 100644 index 00000000..6cd702e2 --- /dev/null +++ b/deps/google/protobuf/map_unittest_pb2.py @@ -0,0 +1,2801 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/map_unittest.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 +from google.protobuf import unittest_no_arena_pb2 as google_dot_protobuf_dot_unittest__no__arena__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/map_unittest.proto', + package='protobuf_unittest', + syntax='proto3', + serialized_pb=_b('\n\"google/protobuf/map_unittest.proto\x12\x11protobuf_unittest\x1a\x1egoogle/protobuf/unittest.proto\x1a\'google/protobuf/unittest_no_arena.proto\"\xd6\x13\n\x07TestMap\x12\x46\n\x0fmap_int32_int32\x18\x01 \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32Int32Entry\x12\x46\n\x0fmap_int64_int64\x18\x02 \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt64Int64Entry\x12J\n\x11map_uint32_uint32\x18\x03 
\x03(\x0b\x32/.protobuf_unittest.TestMap.MapUint32Uint32Entry\x12J\n\x11map_uint64_uint64\x18\x04 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapUint64Uint64Entry\x12J\n\x11map_sint32_sint32\x18\x05 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapSint32Sint32Entry\x12J\n\x11map_sint64_sint64\x18\x06 \x03(\x0b\x32/.protobuf_unittest.TestMap.MapSint64Sint64Entry\x12N\n\x13map_fixed32_fixed32\x18\x07 \x03(\x0b\x32\x31.protobuf_unittest.TestMap.MapFixed32Fixed32Entry\x12N\n\x13map_fixed64_fixed64\x18\x08 \x03(\x0b\x32\x31.protobuf_unittest.TestMap.MapFixed64Fixed64Entry\x12R\n\x15map_sfixed32_sfixed32\x18\t \x03(\x0b\x32\x33.protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry\x12R\n\x15map_sfixed64_sfixed64\x18\n \x03(\x0b\x32\x33.protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry\x12\x46\n\x0fmap_int32_float\x18\x0b \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32FloatEntry\x12H\n\x10map_int32_double\x18\x0c \x03(\x0b\x32..protobuf_unittest.TestMap.MapInt32DoubleEntry\x12\x42\n\rmap_bool_bool\x18\r \x03(\x0b\x32+.protobuf_unittest.TestMap.MapBoolBoolEntry\x12J\n\x11map_string_string\x18\x0e \x03(\x0b\x32/.protobuf_unittest.TestMap.MapStringStringEntry\x12\x46\n\x0fmap_int32_bytes\x18\x0f \x03(\x0b\x32-.protobuf_unittest.TestMap.MapInt32BytesEntry\x12\x44\n\x0emap_int32_enum\x18\x10 \x03(\x0b\x32,.protobuf_unittest.TestMap.MapInt32EnumEntry\x12Y\n\x19map_int32_foreign_message\x18\x11 \x03(\x0b\x32\x36.protobuf_unittest.TestMap.MapInt32ForeignMessageEntry\x12[\n\x1amap_string_foreign_message\x18\x12 \x03(\x0b\x32\x37.protobuf_unittest.TestMap.MapStringForeignMessageEntry\x1a\x34\n\x12MapInt32Int32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x34\n\x12MapInt64Int64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x36\n\x14MapUint32Uint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a\x36\n\x14MapUint64Uint64Entry\x12\x0b\n\x03key\x18\x01 
\x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\x1a\x36\n\x14MapSint32Sint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x11:\x02\x38\x01\x1a\x36\n\x14MapSint64Sint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x12\x12\r\n\x05value\x18\x02 \x01(\x12:\x02\x38\x01\x1a\x38\n\x16MapFixed32Fixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x07\x12\r\n\x05value\x18\x02 \x01(\x07:\x02\x38\x01\x1a\x38\n\x16MapFixed64Fixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x1a:\n\x18MapSfixed32Sfixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x0f\x12\r\n\x05value\x18\x02 \x01(\x0f:\x02\x38\x01\x1a:\n\x18MapSfixed64Sfixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x10\x12\r\n\x05value\x18\x02 \x01(\x10:\x02\x38\x01\x1a\x34\n\x12MapInt32FloatEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x35\n\x13MapInt32DoubleEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\x32\n\x10MapBoolBoolEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x1a\x36\n\x14MapStringStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x34\n\x12MapInt32BytesEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\x1aO\n\x11MapInt32EnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12)\n\x05value\x18\x02 \x01(\x0e\x32\x1a.protobuf_unittest.MapEnum:\x02\x38\x01\x1a`\n\x1bMapInt32ForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\x1a\x61\n\x1cMapStringForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\"A\n\x11TestMapSubmessage\x12,\n\x08test_map\x18\x01 \x01(\x0b\x32\x1a.protobuf_unittest.TestMap\"\xbc\x01\n\x0eTestMessageMap\x12Q\n\x11map_int32_message\x18\x01 
\x03(\x0b\x32\x36.protobuf_unittest.TestMessageMap.MapInt32MessageEntry\x1aW\n\x14MapInt32MessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes:\x02\x38\x01\"\xe3\x01\n\x0fTestSameTypeMap\x12:\n\x04map1\x18\x01 \x03(\x0b\x32,.protobuf_unittest.TestSameTypeMap.Map1Entry\x12:\n\x04map2\x18\x02 \x03(\x0b\x32,.protobuf_unittest.TestSameTypeMap.Map2Entry\x1a+\n\tMap1Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a+\n\tMap2Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\xb6\x01\n\x16TestRequiredMessageMap\x12J\n\tmap_field\x18\x01 \x03(\x0b\x32\x37.protobuf_unittest.TestRequiredMessageMap.MapFieldEntry\x1aP\n\rMapFieldEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired:\x02\x38\x01\"\xd2\x14\n\x0cTestArenaMap\x12K\n\x0fmap_int32_int32\x18\x01 \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32Int32Entry\x12K\n\x0fmap_int64_int64\x18\x02 \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt64Int64Entry\x12O\n\x11map_uint32_uint32\x18\x03 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapUint32Uint32Entry\x12O\n\x11map_uint64_uint64\x18\x04 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapUint64Uint64Entry\x12O\n\x11map_sint32_sint32\x18\x05 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapSint32Sint32Entry\x12O\n\x11map_sint64_sint64\x18\x06 \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapSint64Sint64Entry\x12S\n\x13map_fixed32_fixed32\x18\x07 \x03(\x0b\x32\x36.protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry\x12S\n\x13map_fixed64_fixed64\x18\x08 \x03(\x0b\x32\x36.protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry\x12W\n\x15map_sfixed32_sfixed32\x18\t \x03(\x0b\x32\x38.protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry\x12W\n\x15map_sfixed64_sfixed64\x18\n 
\x03(\x0b\x32\x38.protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry\x12K\n\x0fmap_int32_float\x18\x0b \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32FloatEntry\x12M\n\x10map_int32_double\x18\x0c \x03(\x0b\x32\x33.protobuf_unittest.TestArenaMap.MapInt32DoubleEntry\x12G\n\rmap_bool_bool\x18\r \x03(\x0b\x32\x30.protobuf_unittest.TestArenaMap.MapBoolBoolEntry\x12O\n\x11map_string_string\x18\x0e \x03(\x0b\x32\x34.protobuf_unittest.TestArenaMap.MapStringStringEntry\x12K\n\x0fmap_int32_bytes\x18\x0f \x03(\x0b\x32\x32.protobuf_unittest.TestArenaMap.MapInt32BytesEntry\x12I\n\x0emap_int32_enum\x18\x10 \x03(\x0b\x32\x31.protobuf_unittest.TestArenaMap.MapInt32EnumEntry\x12^\n\x19map_int32_foreign_message\x18\x11 \x03(\x0b\x32;.protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry\x12n\n\"map_int32_foreign_message_no_arena\x18\x12 \x03(\x0b\x32\x42.protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry\x1a\x34\n\x12MapInt32Int32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x34\n\x12MapInt64Int64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x36\n\x14MapUint32Uint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a\x36\n\x14MapUint64Uint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x04:\x02\x38\x01\x1a\x36\n\x14MapSint32Sint32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x11:\x02\x38\x01\x1a\x36\n\x14MapSint64Sint64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x12\x12\r\n\x05value\x18\x02 \x01(\x12:\x02\x38\x01\x1a\x38\n\x16MapFixed32Fixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x07\x12\r\n\x05value\x18\x02 \x01(\x07:\x02\x38\x01\x1a\x38\n\x16MapFixed64Fixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x1a:\n\x18MapSfixed32Sfixed32Entry\x12\x0b\n\x03key\x18\x01 \x01(\x0f\x12\r\n\x05value\x18\x02 
\x01(\x0f:\x02\x38\x01\x1a:\n\x18MapSfixed64Sfixed64Entry\x12\x0b\n\x03key\x18\x01 \x01(\x10\x12\r\n\x05value\x18\x02 \x01(\x10:\x02\x38\x01\x1a\x34\n\x12MapInt32FloatEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x02:\x02\x38\x01\x1a\x35\n\x13MapInt32DoubleEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\x32\n\x10MapBoolBoolEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x08:\x02\x38\x01\x1a\x36\n\x14MapStringStringEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x34\n\x12MapInt32BytesEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\x1aO\n\x11MapInt32EnumEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12)\n\x05value\x18\x02 \x01(\x0e\x32\x1a.protobuf_unittest.MapEnum:\x02\x38\x01\x1a`\n\x1bMapInt32ForeignMessageEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:\x02\x38\x01\x1ap\n\"MapInt32ForeignMessageNoArenaEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.protobuf_unittest_no_arena.ForeignMessage:\x02\x38\x01\"\xe4\x01\n\x1fMessageContainingEnumCalledType\x12J\n\x04type\x18\x01 \x03(\x0b\x32<.protobuf_unittest.MessageContainingEnumCalledType.TypeEntry\x1a_\n\tTypeEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0b\x32\x32.protobuf_unittest.MessageContainingEnumCalledType:\x02\x38\x01\"\x14\n\x04Type\x12\x0c\n\x08TYPE_FOO\x10\x00\"\x9d\x01\n\x1fMessageContainingMapCalledEntry\x12L\n\x05\x65ntry\x18\x01 \x03(\x0b\x32=.protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry\x1a,\n\nEntryEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\xad\x01\n\x17TestRecursiveMapMessage\x12<\n\x01\x61\x18\x01 \x03(\x0b\x32\x31.protobuf_unittest.TestRecursiveMapMessage.AEntry\x1aT\n\x06\x41\x45ntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.protobuf_unittest.TestRecursiveMapMessage:\x02\x38\x01*?\n\x07MapEnum\x12\x10\n\x0cMAP_ENUM_FOO\x10\x00\x12\x10\n\x0cMAP_ENUM_BAR\x10\x01\x12\x10\n\x0cMAP_ENUM_BAZ\x10\x02\x42\x03\xf8\x01\x01\x62\x06proto3') + , + dependencies=[google_dot_protobuf_dot_unittest__pb2.DESCRIPTOR,google_dot_protobuf_dot_unittest__no__arena__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_MAPENUM = _descriptor.EnumDescriptor( + name='MapEnum', + full_name='protobuf_unittest.MapEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MAP_ENUM_FOO', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAP_ENUM_BAR', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAP_ENUM_BAZ', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=6536, + serialized_end=6599, +) +_sym_db.RegisterEnumDescriptor(_MAPENUM) + +MapEnum = enum_type_wrapper.EnumTypeWrapper(_MAPENUM) +MAP_ENUM_FOO = 0 +MAP_ENUM_BAR = 1 +MAP_ENUM_BAZ = 2 + + +_MESSAGECONTAININGENUMCALLEDTYPE_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='protobuf_unittest.MessageContainingEnumCalledType.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_FOO', index=0, number=0, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=6178, + serialized_end=6198, +) +_sym_db.RegisterEnumDescriptor(_MESSAGECONTAININGENUMCALLEDTYPE_TYPE) + + +_TESTMAP_MAPINT32INT32ENTRY = _descriptor.Descriptor( + name='MapInt32Int32Entry', + full_name='protobuf_unittest.TestMap.MapInt32Int32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapInt32Int32Entry.key', index=0, + number=1, type=5, cpp_type=1, 
label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapInt32Int32Entry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1534, + serialized_end=1586, +) + +_TESTMAP_MAPINT64INT64ENTRY = _descriptor.Descriptor( + name='MapInt64Int64Entry', + full_name='protobuf_unittest.TestMap.MapInt64Int64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapInt64Int64Entry.key', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapInt64Int64Entry.value', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1588, + serialized_end=1640, +) + +_TESTMAP_MAPUINT32UINT32ENTRY = _descriptor.Descriptor( + 
name='MapUint32Uint32Entry', + full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry.key', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapUint32Uint32Entry.value', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1642, + serialized_end=1696, +) + +_TESTMAP_MAPUINT64UINT64ENTRY = _descriptor.Descriptor( + name='MapUint64Uint64Entry', + full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry.key', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapUint64Uint64Entry.value', index=1, + number=2, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1698, + serialized_end=1752, +) + +_TESTMAP_MAPSINT32SINT32ENTRY = _descriptor.Descriptor( + name='MapSint32Sint32Entry', + full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry.key', index=0, + number=1, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapSint32Sint32Entry.value', index=1, + number=2, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1754, + serialized_end=1808, +) + +_TESTMAP_MAPSINT64SINT64ENTRY = _descriptor.Descriptor( + name='MapSint64Sint64Entry', + full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry.key', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', 
full_name='protobuf_unittest.TestMap.MapSint64Sint64Entry.value', index=1, + number=2, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1810, + serialized_end=1864, +) + +_TESTMAP_MAPFIXED32FIXED32ENTRY = _descriptor.Descriptor( + name='MapFixed32Fixed32Entry', + full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry.key', index=0, + number=1, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapFixed32Fixed32Entry.value', index=1, + number=2, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1866, + serialized_end=1922, +) + +_TESTMAP_MAPFIXED64FIXED64ENTRY = _descriptor.Descriptor( + name='MapFixed64Fixed64Entry', + full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', 
full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry.key', index=0, + number=1, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapFixed64Fixed64Entry.value', index=1, + number=2, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1924, + serialized_end=1980, +) + +_TESTMAP_MAPSFIXED32SFIXED32ENTRY = _descriptor.Descriptor( + name='MapSfixed32Sfixed32Entry', + full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry.key', index=0, + number=1, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry.value', index=1, + number=2, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + 
oneofs=[ + ], + serialized_start=1982, + serialized_end=2040, +) + +_TESTMAP_MAPSFIXED64SFIXED64ENTRY = _descriptor.Descriptor( + name='MapSfixed64Sfixed64Entry', + full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry.key', index=0, + number=1, type=16, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry.value', index=1, + number=2, type=16, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2042, + serialized_end=2100, +) + +_TESTMAP_MAPINT32FLOATENTRY = _descriptor.Descriptor( + name='MapInt32FloatEntry', + full_name='protobuf_unittest.TestMap.MapInt32FloatEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapInt32FloatEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapInt32FloatEntry.value', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2102, + serialized_end=2154, +) + +_TESTMAP_MAPINT32DOUBLEENTRY = _descriptor.Descriptor( + name='MapInt32DoubleEntry', + full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapInt32DoubleEntry.value', index=1, + number=2, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2156, + serialized_end=2209, +) + +_TESTMAP_MAPBOOLBOOLENTRY = _descriptor.Descriptor( + name='MapBoolBoolEntry', + full_name='protobuf_unittest.TestMap.MapBoolBoolEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapBoolBoolEntry.key', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapBoolBoolEntry.value', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2211, + serialized_end=2261, +) + +_TESTMAP_MAPSTRINGSTRINGENTRY = _descriptor.Descriptor( + name='MapStringStringEntry', + full_name='protobuf_unittest.TestMap.MapStringStringEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapStringStringEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapStringStringEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2263, + serialized_end=2317, +) + +_TESTMAP_MAPINT32BYTESENTRY = _descriptor.Descriptor( + name='MapInt32BytesEntry', + 
full_name='protobuf_unittest.TestMap.MapInt32BytesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapInt32BytesEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapInt32BytesEntry.value', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2319, + serialized_end=2371, +) + +_TESTMAP_MAPINT32ENUMENTRY = _descriptor.Descriptor( + name='MapInt32EnumEntry', + full_name='protobuf_unittest.TestMap.MapInt32EnumEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapInt32EnumEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapInt32EnumEntry.value', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2373, + serialized_end=2452, +) + +_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY = _descriptor.Descriptor( + name='MapInt32ForeignMessageEntry', + full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapInt32ForeignMessageEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2454, + serialized_end=2550, +) + +_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY = _descriptor.Descriptor( + name='MapStringForeignMessageEntry', + full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMap.MapStringForeignMessageEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2552, + serialized_end=2649, +) + +_TESTMAP = _descriptor.Descriptor( + name='TestMap', + full_name='protobuf_unittest.TestMap', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='map_int32_int32', full_name='protobuf_unittest.TestMap.map_int32_int32', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int64_int64', full_name='protobuf_unittest.TestMap.map_int64_int64', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_uint32_uint32', full_name='protobuf_unittest.TestMap.map_uint32_uint32', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_uint64_uint64', full_name='protobuf_unittest.TestMap.map_uint64_uint64', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_sint32_sint32', full_name='protobuf_unittest.TestMap.map_sint32_sint32', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_sint64_sint64', full_name='protobuf_unittest.TestMap.map_sint64_sint64', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_fixed32_fixed32', full_name='protobuf_unittest.TestMap.map_fixed32_fixed32', index=6, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_fixed64_fixed64', full_name='protobuf_unittest.TestMap.map_fixed64_fixed64', index=7, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_sfixed32_sfixed32', full_name='protobuf_unittest.TestMap.map_sfixed32_sfixed32', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_sfixed64_sfixed64', full_name='protobuf_unittest.TestMap.map_sfixed64_sfixed64', index=9, + number=10, type=11, cpp_type=10, label=3, + 
has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_float', full_name='protobuf_unittest.TestMap.map_int32_float', index=10, + number=11, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_double', full_name='protobuf_unittest.TestMap.map_int32_double', index=11, + number=12, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_bool_bool', full_name='protobuf_unittest.TestMap.map_bool_bool', index=12, + number=13, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_string_string', full_name='protobuf_unittest.TestMap.map_string_string', index=13, + number=14, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_bytes', full_name='protobuf_unittest.TestMap.map_int32_bytes', index=14, + number=15, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_enum', full_name='protobuf_unittest.TestMap.map_int32_enum', index=15, + number=16, type=11, cpp_type=10, label=3, + 
has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_foreign_message', full_name='protobuf_unittest.TestMap.map_int32_foreign_message', index=16, + number=17, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_string_foreign_message', full_name='protobuf_unittest.TestMap.map_string_foreign_message', index=17, + number=18, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTMAP_MAPINT32INT32ENTRY, _TESTMAP_MAPINT64INT64ENTRY, _TESTMAP_MAPUINT32UINT32ENTRY, _TESTMAP_MAPUINT64UINT64ENTRY, _TESTMAP_MAPSINT32SINT32ENTRY, _TESTMAP_MAPSINT64SINT64ENTRY, _TESTMAP_MAPFIXED32FIXED32ENTRY, _TESTMAP_MAPFIXED64FIXED64ENTRY, _TESTMAP_MAPSFIXED32SFIXED32ENTRY, _TESTMAP_MAPSFIXED64SFIXED64ENTRY, _TESTMAP_MAPINT32FLOATENTRY, _TESTMAP_MAPINT32DOUBLEENTRY, _TESTMAP_MAPBOOLBOOLENTRY, _TESTMAP_MAPSTRINGSTRINGENTRY, _TESTMAP_MAPINT32BYTESENTRY, _TESTMAP_MAPINT32ENUMENTRY, _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY, _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=131, + serialized_end=2649, +) + + +_TESTMAPSUBMESSAGE = _descriptor.Descriptor( + name='TestMapSubmessage', + full_name='protobuf_unittest.TestMapSubmessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='test_map', full_name='protobuf_unittest.TestMapSubmessage.test_map', index=0, + number=1, type=11, cpp_type=10, 
label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2651, + serialized_end=2716, +) + + +_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY = _descriptor.Descriptor( + name='MapInt32MessageEntry', + full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestMessageMap.MapInt32MessageEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2820, + serialized_end=2907, +) + +_TESTMESSAGEMAP = _descriptor.Descriptor( + name='TestMessageMap', + full_name='protobuf_unittest.TestMessageMap', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='map_int32_message', full_name='protobuf_unittest.TestMessageMap.map_int32_message', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2719, + serialized_end=2907, +) + + +_TESTSAMETYPEMAP_MAP1ENTRY = _descriptor.Descriptor( + name='Map1Entry', + full_name='protobuf_unittest.TestSameTypeMap.Map1Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestSameTypeMap.Map1Entry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestSameTypeMap.Map1Entry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3049, + serialized_end=3092, +) + +_TESTSAMETYPEMAP_MAP2ENTRY = _descriptor.Descriptor( + name='Map2Entry', + full_name='protobuf_unittest.TestSameTypeMap.Map2Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestSameTypeMap.Map2Entry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestSameTypeMap.Map2Entry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3094, + serialized_end=3137, +) + +_TESTSAMETYPEMAP = _descriptor.Descriptor( + name='TestSameTypeMap', + full_name='protobuf_unittest.TestSameTypeMap', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='map1', full_name='protobuf_unittest.TestSameTypeMap.map1', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map2', full_name='protobuf_unittest.TestSameTypeMap.map2', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTSAMETYPEMAP_MAP1ENTRY, _TESTSAMETYPEMAP_MAP2ENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2910, + serialized_end=3137, +) + + +_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY = _descriptor.Descriptor( + name='MapFieldEntry', + full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name='key', full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestRequiredMessageMap.MapFieldEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3242, + serialized_end=3322, +) + +_TESTREQUIREDMESSAGEMAP = _descriptor.Descriptor( + name='TestRequiredMessageMap', + full_name='protobuf_unittest.TestRequiredMessageMap', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='map_field', full_name='protobuf_unittest.TestRequiredMessageMap.map_field', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3140, + serialized_end=3322, +) + + +_TESTARENAMAP_MAPINT32INT32ENTRY = _descriptor.Descriptor( + name='MapInt32Int32Entry', + full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', 
full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32Int32Entry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1534, + serialized_end=1586, +) + +_TESTARENAMAP_MAPINT64INT64ENTRY = _descriptor.Descriptor( + name='MapInt64Int64Entry', + full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry.key', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapInt64Int64Entry.value', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + 
], + serialized_start=1588, + serialized_end=1640, +) + +_TESTARENAMAP_MAPUINT32UINT32ENTRY = _descriptor.Descriptor( + name='MapUint32Uint32Entry', + full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry.key', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapUint32Uint32Entry.value', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1642, + serialized_end=1696, +) + +_TESTARENAMAP_MAPUINT64UINT64ENTRY = _descriptor.Descriptor( + name='MapUint64Uint64Entry', + full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry.key', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapUint64Uint64Entry.value', index=1, + number=2, type=4, cpp_type=4, label=1, + has_default_value=False, 
default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1698, + serialized_end=1752, +) + +_TESTARENAMAP_MAPSINT32SINT32ENTRY = _descriptor.Descriptor( + name='MapSint32Sint32Entry', + full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry.key', index=0, + number=1, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapSint32Sint32Entry.value', index=1, + number=2, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1754, + serialized_end=1808, +) + +_TESTARENAMAP_MAPSINT64SINT64ENTRY = _descriptor.Descriptor( + name='MapSint64Sint64Entry', + full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry.key', index=0, + number=1, type=18, cpp_type=2, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapSint64Sint64Entry.value', index=1, + number=2, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1810, + serialized_end=1864, +) + +_TESTARENAMAP_MAPFIXED32FIXED32ENTRY = _descriptor.Descriptor( + name='MapFixed32Fixed32Entry', + full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry.key', index=0, + number=1, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry.value', index=1, + number=2, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1866, + serialized_end=1922, +) + +_TESTARENAMAP_MAPFIXED64FIXED64ENTRY = 
_descriptor.Descriptor( + name='MapFixed64Fixed64Entry', + full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry.key', index=0, + number=1, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry.value', index=1, + number=2, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1924, + serialized_end=1980, +) + +_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY = _descriptor.Descriptor( + name='MapSfixed32Sfixed32Entry', + full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry.key', index=0, + number=1, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry.value', index=1, + number=2, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1982, + serialized_end=2040, +) + +_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY = _descriptor.Descriptor( + name='MapSfixed64Sfixed64Entry', + full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry.key', index=0, + number=1, type=16, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry.value', index=1, + number=2, type=16, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2042, + serialized_end=2100, +) + +_TESTARENAMAP_MAPINT32FLOATENTRY = _descriptor.Descriptor( + name='MapInt32FloatEntry', + full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32FloatEntry.value', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2102, + serialized_end=2154, +) + +_TESTARENAMAP_MAPINT32DOUBLEENTRY = _descriptor.Descriptor( + name='MapInt32DoubleEntry', + full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32DoubleEntry.value', index=1, + number=2, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2156, + serialized_end=2209, +) + +_TESTARENAMAP_MAPBOOLBOOLENTRY = _descriptor.Descriptor( + name='MapBoolBoolEntry', + 
full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry.key', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapBoolBoolEntry.value', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2211, + serialized_end=2261, +) + +_TESTARENAMAP_MAPSTRINGSTRINGENTRY = _descriptor.Descriptor( + name='MapStringStringEntry', + full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapStringStringEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2263, + serialized_end=2317, +) + +_TESTARENAMAP_MAPINT32BYTESENTRY = _descriptor.Descriptor( + name='MapInt32BytesEntry', + full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32BytesEntry.value', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2319, + serialized_end=2371, +) + +_TESTARENAMAP_MAPINT32ENUMENTRY = _descriptor.Descriptor( + name='MapInt32EnumEntry', + full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32EnumEntry.value', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2373, + serialized_end=2452, +) + +_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY = _descriptor.Descriptor( + name='MapInt32ForeignMessageEntry', + full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2454, + serialized_end=2550, +) + +_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY = _descriptor.Descriptor( + name='MapInt32ForeignMessageNoArenaEntry', + 
full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5855, + serialized_end=5967, +) + +_TESTARENAMAP = _descriptor.Descriptor( + name='TestArenaMap', + full_name='protobuf_unittest.TestArenaMap', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='map_int32_int32', full_name='protobuf_unittest.TestArenaMap.map_int32_int32', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int64_int64', full_name='protobuf_unittest.TestArenaMap.map_int64_int64', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='map_uint32_uint32', full_name='protobuf_unittest.TestArenaMap.map_uint32_uint32', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_uint64_uint64', full_name='protobuf_unittest.TestArenaMap.map_uint64_uint64', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_sint32_sint32', full_name='protobuf_unittest.TestArenaMap.map_sint32_sint32', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_sint64_sint64', full_name='protobuf_unittest.TestArenaMap.map_sint64_sint64', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_fixed32_fixed32', full_name='protobuf_unittest.TestArenaMap.map_fixed32_fixed32', index=6, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_fixed64_fixed64', full_name='protobuf_unittest.TestArenaMap.map_fixed64_fixed64', index=7, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_sfixed32_sfixed32', full_name='protobuf_unittest.TestArenaMap.map_sfixed32_sfixed32', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_sfixed64_sfixed64', full_name='protobuf_unittest.TestArenaMap.map_sfixed64_sfixed64', index=9, + number=10, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_float', full_name='protobuf_unittest.TestArenaMap.map_int32_float', index=10, + number=11, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_double', full_name='protobuf_unittest.TestArenaMap.map_int32_double', index=11, + number=12, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_bool_bool', full_name='protobuf_unittest.TestArenaMap.map_bool_bool', index=12, + number=13, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_string_string', full_name='protobuf_unittest.TestArenaMap.map_string_string', index=13, + number=14, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_bytes', full_name='protobuf_unittest.TestArenaMap.map_int32_bytes', index=14, + number=15, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_enum', full_name='protobuf_unittest.TestArenaMap.map_int32_enum', index=15, + number=16, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_foreign_message', full_name='protobuf_unittest.TestArenaMap.map_int32_foreign_message', index=16, + number=17, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_int32_foreign_message_no_arena', full_name='protobuf_unittest.TestArenaMap.map_int32_foreign_message_no_arena', index=17, + number=18, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTARENAMAP_MAPINT32INT32ENTRY, _TESTARENAMAP_MAPINT64INT64ENTRY, _TESTARENAMAP_MAPUINT32UINT32ENTRY, _TESTARENAMAP_MAPUINT64UINT64ENTRY, _TESTARENAMAP_MAPSINT32SINT32ENTRY, _TESTARENAMAP_MAPSINT64SINT64ENTRY, _TESTARENAMAP_MAPFIXED32FIXED32ENTRY, _TESTARENAMAP_MAPFIXED64FIXED64ENTRY, _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY, _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY, _TESTARENAMAP_MAPINT32FLOATENTRY, _TESTARENAMAP_MAPINT32DOUBLEENTRY, _TESTARENAMAP_MAPBOOLBOOLENTRY, 
_TESTARENAMAP_MAPSTRINGSTRINGENTRY, _TESTARENAMAP_MAPINT32BYTESENTRY, _TESTARENAMAP_MAPINT32ENUMENTRY, _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY, _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3325, + serialized_end=5967, +) + + +_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY = _descriptor.Descriptor( + name='TypeEntry', + full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.MessageContainingEnumCalledType.TypeEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6081, + serialized_end=6176, +) + +_MESSAGECONTAININGENUMCALLEDTYPE = _descriptor.Descriptor( + name='MessageContainingEnumCalledType', + full_name='protobuf_unittest.MessageContainingEnumCalledType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='protobuf_unittest.MessageContainingEnumCalledType.type', index=0, + number=1, type=11, cpp_type=10, label=3, + 
has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY, ], + enum_types=[ + _MESSAGECONTAININGENUMCALLEDTYPE_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5970, + serialized_end=6198, +) + + +_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY = _descriptor.Descriptor( + name='EntryEntry', + full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6314, + serialized_end=6358, +) + +_MESSAGECONTAININGMAPCALLEDENTRY = _descriptor.Descriptor( + name='MessageContainingMapCalledEntry', + full_name='protobuf_unittest.MessageContainingMapCalledEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entry', 
full_name='protobuf_unittest.MessageContainingMapCalledEntry.entry', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6201, + serialized_end=6358, +) + + +_TESTRECURSIVEMAPMESSAGE_AENTRY = _descriptor.Descriptor( + name='AEntry', + full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.TestRecursiveMapMessage.AEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6450, + serialized_end=6534, +) + +_TESTRECURSIVEMAPMESSAGE = _descriptor.Descriptor( + name='TestRecursiveMapMessage', + full_name='protobuf_unittest.TestRecursiveMapMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', 
full_name='protobuf_unittest.TestRecursiveMapMessage.a', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTRECURSIVEMAPMESSAGE_AENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6361, + serialized_end=6534, +) + +_TESTMAP_MAPINT32INT32ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPINT64INT64ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPUINT32UINT32ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPUINT64UINT64ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPSINT32SINT32ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPSINT64SINT64ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPFIXED32FIXED32ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPFIXED64FIXED64ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPSFIXED32SFIXED32ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPSFIXED64SFIXED64ENTRY.containing_type = _TESTMAP +_TESTMAP_MAPINT32FLOATENTRY.containing_type = _TESTMAP +_TESTMAP_MAPINT32DOUBLEENTRY.containing_type = _TESTMAP +_TESTMAP_MAPBOOLBOOLENTRY.containing_type = _TESTMAP +_TESTMAP_MAPSTRINGSTRINGENTRY.containing_type = _TESTMAP +_TESTMAP_MAPINT32BYTESENTRY.containing_type = _TESTMAP +_TESTMAP_MAPINT32ENUMENTRY.fields_by_name['value'].enum_type = _MAPENUM +_TESTMAP_MAPINT32ENUMENTRY.containing_type = _TESTMAP +_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE +_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.containing_type = _TESTMAP +_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE +_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.containing_type = _TESTMAP +_TESTMAP.fields_by_name['map_int32_int32'].message_type = _TESTMAP_MAPINT32INT32ENTRY 
+_TESTMAP.fields_by_name['map_int64_int64'].message_type = _TESTMAP_MAPINT64INT64ENTRY +_TESTMAP.fields_by_name['map_uint32_uint32'].message_type = _TESTMAP_MAPUINT32UINT32ENTRY +_TESTMAP.fields_by_name['map_uint64_uint64'].message_type = _TESTMAP_MAPUINT64UINT64ENTRY +_TESTMAP.fields_by_name['map_sint32_sint32'].message_type = _TESTMAP_MAPSINT32SINT32ENTRY +_TESTMAP.fields_by_name['map_sint64_sint64'].message_type = _TESTMAP_MAPSINT64SINT64ENTRY +_TESTMAP.fields_by_name['map_fixed32_fixed32'].message_type = _TESTMAP_MAPFIXED32FIXED32ENTRY +_TESTMAP.fields_by_name['map_fixed64_fixed64'].message_type = _TESTMAP_MAPFIXED64FIXED64ENTRY +_TESTMAP.fields_by_name['map_sfixed32_sfixed32'].message_type = _TESTMAP_MAPSFIXED32SFIXED32ENTRY +_TESTMAP.fields_by_name['map_sfixed64_sfixed64'].message_type = _TESTMAP_MAPSFIXED64SFIXED64ENTRY +_TESTMAP.fields_by_name['map_int32_float'].message_type = _TESTMAP_MAPINT32FLOATENTRY +_TESTMAP.fields_by_name['map_int32_double'].message_type = _TESTMAP_MAPINT32DOUBLEENTRY +_TESTMAP.fields_by_name['map_bool_bool'].message_type = _TESTMAP_MAPBOOLBOOLENTRY +_TESTMAP.fields_by_name['map_string_string'].message_type = _TESTMAP_MAPSTRINGSTRINGENTRY +_TESTMAP.fields_by_name['map_int32_bytes'].message_type = _TESTMAP_MAPINT32BYTESENTRY +_TESTMAP.fields_by_name['map_int32_enum'].message_type = _TESTMAP_MAPINT32ENUMENTRY +_TESTMAP.fields_by_name['map_int32_foreign_message'].message_type = _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY +_TESTMAP.fields_by_name['map_string_foreign_message'].message_type = _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY +_TESTMAPSUBMESSAGE.fields_by_name['test_map'].message_type = _TESTMAP +_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._TESTALLTYPES +_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.containing_type = _TESTMESSAGEMAP +_TESTMESSAGEMAP.fields_by_name['map_int32_message'].message_type = _TESTMESSAGEMAP_MAPINT32MESSAGEENTRY +_TESTSAMETYPEMAP_MAP1ENTRY.containing_type = 
_TESTSAMETYPEMAP +_TESTSAMETYPEMAP_MAP2ENTRY.containing_type = _TESTSAMETYPEMAP +_TESTSAMETYPEMAP.fields_by_name['map1'].message_type = _TESTSAMETYPEMAP_MAP1ENTRY +_TESTSAMETYPEMAP.fields_by_name['map2'].message_type = _TESTSAMETYPEMAP_MAP2ENTRY +_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._TESTREQUIRED +_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.containing_type = _TESTREQUIREDMESSAGEMAP +_TESTREQUIREDMESSAGEMAP.fields_by_name['map_field'].message_type = _TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY +_TESTARENAMAP_MAPINT32INT32ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPINT64INT64ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPUINT32UINT32ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPUINT64UINT64ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPSINT32SINT32ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPSINT64SINT64ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPFIXED32FIXED32ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPFIXED64FIXED64ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPINT32FLOATENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPINT32DOUBLEENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPBOOLBOOLENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPSTRINGSTRINGENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPINT32BYTESENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPINT32ENUMENTRY.fields_by_name['value'].enum_type = _MAPENUM +_TESTARENAMAP_MAPINT32ENUMENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__pb2._FOREIGNMESSAGE +_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.containing_type = _TESTARENAMAP 
+_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_unittest__no__arena__pb2._FOREIGNMESSAGE +_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.containing_type = _TESTARENAMAP +_TESTARENAMAP.fields_by_name['map_int32_int32'].message_type = _TESTARENAMAP_MAPINT32INT32ENTRY +_TESTARENAMAP.fields_by_name['map_int64_int64'].message_type = _TESTARENAMAP_MAPINT64INT64ENTRY +_TESTARENAMAP.fields_by_name['map_uint32_uint32'].message_type = _TESTARENAMAP_MAPUINT32UINT32ENTRY +_TESTARENAMAP.fields_by_name['map_uint64_uint64'].message_type = _TESTARENAMAP_MAPUINT64UINT64ENTRY +_TESTARENAMAP.fields_by_name['map_sint32_sint32'].message_type = _TESTARENAMAP_MAPSINT32SINT32ENTRY +_TESTARENAMAP.fields_by_name['map_sint64_sint64'].message_type = _TESTARENAMAP_MAPSINT64SINT64ENTRY +_TESTARENAMAP.fields_by_name['map_fixed32_fixed32'].message_type = _TESTARENAMAP_MAPFIXED32FIXED32ENTRY +_TESTARENAMAP.fields_by_name['map_fixed64_fixed64'].message_type = _TESTARENAMAP_MAPFIXED64FIXED64ENTRY +_TESTARENAMAP.fields_by_name['map_sfixed32_sfixed32'].message_type = _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY +_TESTARENAMAP.fields_by_name['map_sfixed64_sfixed64'].message_type = _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY +_TESTARENAMAP.fields_by_name['map_int32_float'].message_type = _TESTARENAMAP_MAPINT32FLOATENTRY +_TESTARENAMAP.fields_by_name['map_int32_double'].message_type = _TESTARENAMAP_MAPINT32DOUBLEENTRY +_TESTARENAMAP.fields_by_name['map_bool_bool'].message_type = _TESTARENAMAP_MAPBOOLBOOLENTRY +_TESTARENAMAP.fields_by_name['map_string_string'].message_type = _TESTARENAMAP_MAPSTRINGSTRINGENTRY +_TESTARENAMAP.fields_by_name['map_int32_bytes'].message_type = _TESTARENAMAP_MAPINT32BYTESENTRY +_TESTARENAMAP.fields_by_name['map_int32_enum'].message_type = _TESTARENAMAP_MAPINT32ENUMENTRY +_TESTARENAMAP.fields_by_name['map_int32_foreign_message'].message_type = _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY 
+_TESTARENAMAP.fields_by_name['map_int32_foreign_message_no_arena'].message_type = _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY +_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.fields_by_name['value'].message_type = _MESSAGECONTAININGENUMCALLEDTYPE +_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.containing_type = _MESSAGECONTAININGENUMCALLEDTYPE +_MESSAGECONTAININGENUMCALLEDTYPE.fields_by_name['type'].message_type = _MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY +_MESSAGECONTAININGENUMCALLEDTYPE_TYPE.containing_type = _MESSAGECONTAININGENUMCALLEDTYPE +_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY.containing_type = _MESSAGECONTAININGMAPCALLEDENTRY +_MESSAGECONTAININGMAPCALLEDENTRY.fields_by_name['entry'].message_type = _MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY +_TESTRECURSIVEMAPMESSAGE_AENTRY.fields_by_name['value'].message_type = _TESTRECURSIVEMAPMESSAGE +_TESTRECURSIVEMAPMESSAGE_AENTRY.containing_type = _TESTRECURSIVEMAPMESSAGE +_TESTRECURSIVEMAPMESSAGE.fields_by_name['a'].message_type = _TESTRECURSIVEMAPMESSAGE_AENTRY +DESCRIPTOR.message_types_by_name['TestMap'] = _TESTMAP +DESCRIPTOR.message_types_by_name['TestMapSubmessage'] = _TESTMAPSUBMESSAGE +DESCRIPTOR.message_types_by_name['TestMessageMap'] = _TESTMESSAGEMAP +DESCRIPTOR.message_types_by_name['TestSameTypeMap'] = _TESTSAMETYPEMAP +DESCRIPTOR.message_types_by_name['TestRequiredMessageMap'] = _TESTREQUIREDMESSAGEMAP +DESCRIPTOR.message_types_by_name['TestArenaMap'] = _TESTARENAMAP +DESCRIPTOR.message_types_by_name['MessageContainingEnumCalledType'] = _MESSAGECONTAININGENUMCALLEDTYPE +DESCRIPTOR.message_types_by_name['MessageContainingMapCalledEntry'] = _MESSAGECONTAININGMAPCALLEDENTRY +DESCRIPTOR.message_types_by_name['TestRecursiveMapMessage'] = _TESTRECURSIVEMAPMESSAGE +DESCRIPTOR.enum_types_by_name['MapEnum'] = _MAPENUM + +TestMap = _reflection.GeneratedProtocolMessageType('TestMap', (_message.Message,), dict( + + MapInt32Int32Entry = _reflection.GeneratedProtocolMessageType('MapInt32Int32Entry', 
(_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPINT32INT32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32Int32Entry) + )) + , + + MapInt64Int64Entry = _reflection.GeneratedProtocolMessageType('MapInt64Int64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPINT64INT64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt64Int64Entry) + )) + , + + MapUint32Uint32Entry = _reflection.GeneratedProtocolMessageType('MapUint32Uint32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPUINT32UINT32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapUint32Uint32Entry) + )) + , + + MapUint64Uint64Entry = _reflection.GeneratedProtocolMessageType('MapUint64Uint64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPUINT64UINT64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapUint64Uint64Entry) + )) + , + + MapSint32Sint32Entry = _reflection.GeneratedProtocolMessageType('MapSint32Sint32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPSINT32SINT32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSint32Sint32Entry) + )) + , + + MapSint64Sint64Entry = _reflection.GeneratedProtocolMessageType('MapSint64Sint64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPSINT64SINT64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSint64Sint64Entry) + )) + , + + MapFixed32Fixed32Entry = _reflection.GeneratedProtocolMessageType('MapFixed32Fixed32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPFIXED32FIXED32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # 
@@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapFixed32Fixed32Entry) + )) + , + + MapFixed64Fixed64Entry = _reflection.GeneratedProtocolMessageType('MapFixed64Fixed64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPFIXED64FIXED64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapFixed64Fixed64Entry) + )) + , + + MapSfixed32Sfixed32Entry = _reflection.GeneratedProtocolMessageType('MapSfixed32Sfixed32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPSFIXED32SFIXED32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSfixed32Sfixed32Entry) + )) + , + + MapSfixed64Sfixed64Entry = _reflection.GeneratedProtocolMessageType('MapSfixed64Sfixed64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPSFIXED64SFIXED64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapSfixed64Sfixed64Entry) + )) + , + + MapInt32FloatEntry = _reflection.GeneratedProtocolMessageType('MapInt32FloatEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPINT32FLOATENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32FloatEntry) + )) + , + + MapInt32DoubleEntry = _reflection.GeneratedProtocolMessageType('MapInt32DoubleEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPINT32DOUBLEENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32DoubleEntry) + )) + , + + MapBoolBoolEntry = _reflection.GeneratedProtocolMessageType('MapBoolBoolEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPBOOLBOOLENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapBoolBoolEntry) + )) + , + + 
MapStringStringEntry = _reflection.GeneratedProtocolMessageType('MapStringStringEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPSTRINGSTRINGENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapStringStringEntry) + )) + , + + MapInt32BytesEntry = _reflection.GeneratedProtocolMessageType('MapInt32BytesEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPINT32BYTESENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32BytesEntry) + )) + , + + MapInt32EnumEntry = _reflection.GeneratedProtocolMessageType('MapInt32EnumEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPINT32ENUMENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32EnumEntry) + )) + , + + MapInt32ForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPINT32FOREIGNMESSAGEENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapInt32ForeignMessageEntry) + )) + , + + MapStringForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapStringForeignMessageEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap.MapStringForeignMessageEntry) + )) + , + DESCRIPTOR = _TESTMAP, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMap) + )) +_sym_db.RegisterMessage(TestMap) +_sym_db.RegisterMessage(TestMap.MapInt32Int32Entry) +_sym_db.RegisterMessage(TestMap.MapInt64Int64Entry) +_sym_db.RegisterMessage(TestMap.MapUint32Uint32Entry) +_sym_db.RegisterMessage(TestMap.MapUint64Uint64Entry) 
+_sym_db.RegisterMessage(TestMap.MapSint32Sint32Entry) +_sym_db.RegisterMessage(TestMap.MapSint64Sint64Entry) +_sym_db.RegisterMessage(TestMap.MapFixed32Fixed32Entry) +_sym_db.RegisterMessage(TestMap.MapFixed64Fixed64Entry) +_sym_db.RegisterMessage(TestMap.MapSfixed32Sfixed32Entry) +_sym_db.RegisterMessage(TestMap.MapSfixed64Sfixed64Entry) +_sym_db.RegisterMessage(TestMap.MapInt32FloatEntry) +_sym_db.RegisterMessage(TestMap.MapInt32DoubleEntry) +_sym_db.RegisterMessage(TestMap.MapBoolBoolEntry) +_sym_db.RegisterMessage(TestMap.MapStringStringEntry) +_sym_db.RegisterMessage(TestMap.MapInt32BytesEntry) +_sym_db.RegisterMessage(TestMap.MapInt32EnumEntry) +_sym_db.RegisterMessage(TestMap.MapInt32ForeignMessageEntry) +_sym_db.RegisterMessage(TestMap.MapStringForeignMessageEntry) + +TestMapSubmessage = _reflection.GeneratedProtocolMessageType('TestMapSubmessage', (_message.Message,), dict( + DESCRIPTOR = _TESTMAPSUBMESSAGE, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMapSubmessage) + )) +_sym_db.RegisterMessage(TestMapSubmessage) + +TestMessageMap = _reflection.GeneratedProtocolMessageType('TestMessageMap', (_message.Message,), dict( + + MapInt32MessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32MessageEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGEMAP_MAPINT32MESSAGEENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageMap.MapInt32MessageEntry) + )) + , + DESCRIPTOR = _TESTMESSAGEMAP, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageMap) + )) +_sym_db.RegisterMessage(TestMessageMap) +_sym_db.RegisterMessage(TestMessageMap.MapInt32MessageEntry) + +TestSameTypeMap = _reflection.GeneratedProtocolMessageType('TestSameTypeMap', (_message.Message,), dict( + + Map1Entry = _reflection.GeneratedProtocolMessageType('Map1Entry', 
(_message.Message,), dict( + DESCRIPTOR = _TESTSAMETYPEMAP_MAP1ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap.Map1Entry) + )) + , + + Map2Entry = _reflection.GeneratedProtocolMessageType('Map2Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTSAMETYPEMAP_MAP2ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap.Map2Entry) + )) + , + DESCRIPTOR = _TESTSAMETYPEMAP, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestSameTypeMap) + )) +_sym_db.RegisterMessage(TestSameTypeMap) +_sym_db.RegisterMessage(TestSameTypeMap.Map1Entry) +_sym_db.RegisterMessage(TestSameTypeMap.Map2Entry) + +TestRequiredMessageMap = _reflection.GeneratedProtocolMessageType('TestRequiredMessageMap', (_message.Message,), dict( + + MapFieldEntry = _reflection.GeneratedProtocolMessageType('MapFieldEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredMessageMap.MapFieldEntry) + )) + , + DESCRIPTOR = _TESTREQUIREDMESSAGEMAP, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredMessageMap) + )) +_sym_db.RegisterMessage(TestRequiredMessageMap) +_sym_db.RegisterMessage(TestRequiredMessageMap.MapFieldEntry) + +TestArenaMap = _reflection.GeneratedProtocolMessageType('TestArenaMap', (_message.Message,), dict( + + MapInt32Int32Entry = _reflection.GeneratedProtocolMessageType('MapInt32Int32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPINT32INT32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32Int32Entry) + )) + , + + MapInt64Int64Entry = 
_reflection.GeneratedProtocolMessageType('MapInt64Int64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPINT64INT64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt64Int64Entry) + )) + , + + MapUint32Uint32Entry = _reflection.GeneratedProtocolMessageType('MapUint32Uint32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPUINT32UINT32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapUint32Uint32Entry) + )) + , + + MapUint64Uint64Entry = _reflection.GeneratedProtocolMessageType('MapUint64Uint64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPUINT64UINT64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapUint64Uint64Entry) + )) + , + + MapSint32Sint32Entry = _reflection.GeneratedProtocolMessageType('MapSint32Sint32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPSINT32SINT32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSint32Sint32Entry) + )) + , + + MapSint64Sint64Entry = _reflection.GeneratedProtocolMessageType('MapSint64Sint64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPSINT64SINT64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSint64Sint64Entry) + )) + , + + MapFixed32Fixed32Entry = _reflection.GeneratedProtocolMessageType('MapFixed32Fixed32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPFIXED32FIXED32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapFixed32Fixed32Entry) + )) + , + + MapFixed64Fixed64Entry = 
_reflection.GeneratedProtocolMessageType('MapFixed64Fixed64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPFIXED64FIXED64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapFixed64Fixed64Entry) + )) + , + + MapSfixed32Sfixed32Entry = _reflection.GeneratedProtocolMessageType('MapSfixed32Sfixed32Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSfixed32Sfixed32Entry) + )) + , + + MapSfixed64Sfixed64Entry = _reflection.GeneratedProtocolMessageType('MapSfixed64Sfixed64Entry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapSfixed64Sfixed64Entry) + )) + , + + MapInt32FloatEntry = _reflection.GeneratedProtocolMessageType('MapInt32FloatEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPINT32FLOATENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32FloatEntry) + )) + , + + MapInt32DoubleEntry = _reflection.GeneratedProtocolMessageType('MapInt32DoubleEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPINT32DOUBLEENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32DoubleEntry) + )) + , + + MapBoolBoolEntry = _reflection.GeneratedProtocolMessageType('MapBoolBoolEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPBOOLBOOLENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapBoolBoolEntry) + )) + , + + MapStringStringEntry = 
_reflection.GeneratedProtocolMessageType('MapStringStringEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPSTRINGSTRINGENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapStringStringEntry) + )) + , + + MapInt32BytesEntry = _reflection.GeneratedProtocolMessageType('MapInt32BytesEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPINT32BYTESENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32BytesEntry) + )) + , + + MapInt32EnumEntry = _reflection.GeneratedProtocolMessageType('MapInt32EnumEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPINT32ENUMENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32EnumEntry) + )) + , + + MapInt32ForeignMessageEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32ForeignMessageEntry) + )) + , + + MapInt32ForeignMessageNoArenaEntry = _reflection.GeneratedProtocolMessageType('MapInt32ForeignMessageNoArenaEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap.MapInt32ForeignMessageNoArenaEntry) + )) + , + DESCRIPTOR = _TESTARENAMAP, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestArenaMap) + )) +_sym_db.RegisterMessage(TestArenaMap) +_sym_db.RegisterMessage(TestArenaMap.MapInt32Int32Entry) +_sym_db.RegisterMessage(TestArenaMap.MapInt64Int64Entry) 
+_sym_db.RegisterMessage(TestArenaMap.MapUint32Uint32Entry) +_sym_db.RegisterMessage(TestArenaMap.MapUint64Uint64Entry) +_sym_db.RegisterMessage(TestArenaMap.MapSint32Sint32Entry) +_sym_db.RegisterMessage(TestArenaMap.MapSint64Sint64Entry) +_sym_db.RegisterMessage(TestArenaMap.MapFixed32Fixed32Entry) +_sym_db.RegisterMessage(TestArenaMap.MapFixed64Fixed64Entry) +_sym_db.RegisterMessage(TestArenaMap.MapSfixed32Sfixed32Entry) +_sym_db.RegisterMessage(TestArenaMap.MapSfixed64Sfixed64Entry) +_sym_db.RegisterMessage(TestArenaMap.MapInt32FloatEntry) +_sym_db.RegisterMessage(TestArenaMap.MapInt32DoubleEntry) +_sym_db.RegisterMessage(TestArenaMap.MapBoolBoolEntry) +_sym_db.RegisterMessage(TestArenaMap.MapStringStringEntry) +_sym_db.RegisterMessage(TestArenaMap.MapInt32BytesEntry) +_sym_db.RegisterMessage(TestArenaMap.MapInt32EnumEntry) +_sym_db.RegisterMessage(TestArenaMap.MapInt32ForeignMessageEntry) +_sym_db.RegisterMessage(TestArenaMap.MapInt32ForeignMessageNoArenaEntry) + +MessageContainingEnumCalledType = _reflection.GeneratedProtocolMessageType('MessageContainingEnumCalledType', (_message.Message,), dict( + + TypeEntry = _reflection.GeneratedProtocolMessageType('TypeEntry', (_message.Message,), dict( + DESCRIPTOR = _MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingEnumCalledType.TypeEntry) + )) + , + DESCRIPTOR = _MESSAGECONTAININGENUMCALLEDTYPE, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingEnumCalledType) + )) +_sym_db.RegisterMessage(MessageContainingEnumCalledType) +_sym_db.RegisterMessage(MessageContainingEnumCalledType.TypeEntry) + +MessageContainingMapCalledEntry = _reflection.GeneratedProtocolMessageType('MessageContainingMapCalledEntry', (_message.Message,), dict( + + EntryEntry = _reflection.GeneratedProtocolMessageType('EntryEntry', (_message.Message,), dict( 
+ DESCRIPTOR = _MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingMapCalledEntry.EntryEntry) + )) + , + DESCRIPTOR = _MESSAGECONTAININGMAPCALLEDENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.MessageContainingMapCalledEntry) + )) +_sym_db.RegisterMessage(MessageContainingMapCalledEntry) +_sym_db.RegisterMessage(MessageContainingMapCalledEntry.EntryEntry) + +TestRecursiveMapMessage = _reflection.GeneratedProtocolMessageType('TestRecursiveMapMessage', (_message.Message,), dict( + + AEntry = _reflection.GeneratedProtocolMessageType('AEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTRECURSIVEMAPMESSAGE_AENTRY, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMapMessage.AEntry) + )) + , + DESCRIPTOR = _TESTRECURSIVEMAPMESSAGE, + __module__ = 'google.protobuf.map_unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMapMessage) + )) +_sym_db.RegisterMessage(TestRecursiveMapMessage) +_sym_db.RegisterMessage(TestRecursiveMapMessage.AEntry) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001')) +_TESTMAP_MAPINT32INT32ENTRY.has_options = True +_TESTMAP_MAPINT32INT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPINT64INT64ENTRY.has_options = True +_TESTMAP_MAPINT64INT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPUINT32UINT32ENTRY.has_options = True +_TESTMAP_MAPUINT32UINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPUINT64UINT64ENTRY.has_options = True +_TESTMAP_MAPUINT64UINT64ENTRY._options = 
_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPSINT32SINT32ENTRY.has_options = True +_TESTMAP_MAPSINT32SINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPSINT64SINT64ENTRY.has_options = True +_TESTMAP_MAPSINT64SINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPFIXED32FIXED32ENTRY.has_options = True +_TESTMAP_MAPFIXED32FIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPFIXED64FIXED64ENTRY.has_options = True +_TESTMAP_MAPFIXED64FIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPSFIXED32SFIXED32ENTRY.has_options = True +_TESTMAP_MAPSFIXED32SFIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPSFIXED64SFIXED64ENTRY.has_options = True +_TESTMAP_MAPSFIXED64SFIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPINT32FLOATENTRY.has_options = True +_TESTMAP_MAPINT32FLOATENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPINT32DOUBLEENTRY.has_options = True +_TESTMAP_MAPINT32DOUBLEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPBOOLBOOLENTRY.has_options = True +_TESTMAP_MAPBOOLBOOLENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPSTRINGSTRINGENTRY.has_options = True +_TESTMAP_MAPSTRINGSTRINGENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPINT32BYTESENTRY.has_options = True +_TESTMAP_MAPINT32BYTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPINT32ENUMENTRY.has_options = True +_TESTMAP_MAPINT32ENUMENTRY._options = 
_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY.has_options = True +_TESTMAP_MAPINT32FOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY.has_options = True +_TESTMAP_MAPSTRINGFOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY.has_options = True +_TESTMESSAGEMAP_MAPINT32MESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTSAMETYPEMAP_MAP1ENTRY.has_options = True +_TESTSAMETYPEMAP_MAP1ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTSAMETYPEMAP_MAP2ENTRY.has_options = True +_TESTSAMETYPEMAP_MAP2ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY.has_options = True +_TESTREQUIREDMESSAGEMAP_MAPFIELDENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPINT32INT32ENTRY.has_options = True +_TESTARENAMAP_MAPINT32INT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPINT64INT64ENTRY.has_options = True +_TESTARENAMAP_MAPINT64INT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPUINT32UINT32ENTRY.has_options = True +_TESTARENAMAP_MAPUINT32UINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPUINT64UINT64ENTRY.has_options = True +_TESTARENAMAP_MAPUINT64UINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPSINT32SINT32ENTRY.has_options = True +_TESTARENAMAP_MAPSINT32SINT32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) 
+_TESTARENAMAP_MAPSINT64SINT64ENTRY.has_options = True +_TESTARENAMAP_MAPSINT64SINT64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPFIXED32FIXED32ENTRY.has_options = True +_TESTARENAMAP_MAPFIXED32FIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPFIXED64FIXED64ENTRY.has_options = True +_TESTARENAMAP_MAPFIXED64FIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY.has_options = True +_TESTARENAMAP_MAPSFIXED32SFIXED32ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY.has_options = True +_TESTARENAMAP_MAPSFIXED64SFIXED64ENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPINT32FLOATENTRY.has_options = True +_TESTARENAMAP_MAPINT32FLOATENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPINT32DOUBLEENTRY.has_options = True +_TESTARENAMAP_MAPINT32DOUBLEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPBOOLBOOLENTRY.has_options = True +_TESTARENAMAP_MAPBOOLBOOLENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPSTRINGSTRINGENTRY.has_options = True +_TESTARENAMAP_MAPSTRINGSTRINGENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPINT32BYTESENTRY.has_options = True +_TESTARENAMAP_MAPINT32BYTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPINT32ENUMENTRY.has_options = True +_TESTARENAMAP_MAPINT32ENUMENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY.has_options = True 
+_TESTARENAMAP_MAPINT32FOREIGNMESSAGEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY.has_options = True +_TESTARENAMAP_MAPINT32FOREIGNMESSAGENOARENAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY.has_options = True +_MESSAGECONTAININGENUMCALLEDTYPE_TYPEENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY.has_options = True +_MESSAGECONTAININGMAPCALLEDENTRY_ENTRYENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTRECURSIVEMAPMESSAGE_AENTRY.has_options = True +_TESTRECURSIVEMAPMESSAGE_AENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/message.py b/deps/google/protobuf/message.py new file mode 100644 index 00000000..de2f5697 --- /dev/null +++ b/deps/google/protobuf/message.py @@ -0,0 +1,292 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# TODO(robinson): We should just make these methods all "pure-virtual" and move +# all implementation out, into reflection.py for now. + + +"""Contains an abstract base class for protocol messages.""" + +__author__ = 'robinson@google.com (Will Robinson)' + +class Error(Exception): pass +class DecodeError(Error): pass +class EncodeError(Error): pass + + +class Message(object): + + """Abstract base class for protocol messages. + + Protocol message classes are almost always generated by the protocol + compiler. These generated types subclass Message and implement the methods + shown below. + + TODO(robinson): Link to an HTML document here. + + TODO(robinson): Document that instances of this class will also + have an Extensions attribute with __getitem__ and __setitem__. + Again, not sure how to best convey this. + + TODO(robinson): Document that the class must also have a static + RegisterExtension(extension_field) method. + Not sure how to best express at this point. + """ + + # TODO(robinson): Document these fields and methods. 
+ + __slots__ = [] + + DESCRIPTOR = None + + def __deepcopy__(self, memo=None): + clone = type(self)() + clone.MergeFrom(self) + return clone + + def __eq__(self, other_msg): + """Recursively compares two messages by value and structure.""" + raise NotImplementedError + + def __ne__(self, other_msg): + # Can't just say self != other_msg, since that would infinitely recurse. :) + return not self == other_msg + + def __hash__(self): + raise TypeError('unhashable object') + + def __str__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def __unicode__(self): + """Outputs a human-readable representation of the message.""" + raise NotImplementedError + + def MergeFrom(self, other_msg): + """Merges the contents of the specified message into current message. + + This method merges the contents of the specified message into the current + message. Singular fields that are set in the specified message overwrite + the corresponding fields in the current message. Repeated fields are + appended. Singular sub-messages and groups are recursively merged. + + Args: + other_msg: Message to merge into the current message. + """ + raise NotImplementedError + + def CopyFrom(self, other_msg): + """Copies the content of the specified message into the current message. + + The method clears the current message and then merges the specified + message using MergeFrom. + + Args: + other_msg: Message to copy into the current one. + """ + if self is other_msg: + return + self.Clear() + self.MergeFrom(other_msg) + + def Clear(self): + """Clears all data that was set in the message.""" + raise NotImplementedError + + def SetInParent(self): + """Mark this as present in the parent. + + This normally happens automatically when you assign a field of a + sub-message, but sometimes you want to make the sub-message + present while keeping it empty. 
If you find yourself using this, + you may want to reconsider your design.""" + raise NotImplementedError + + def IsInitialized(self): + """Checks if the message is initialized. + + Returns: + The method returns True if the message is initialized (i.e. all of its + required fields are set). + """ + raise NotImplementedError + + # TODO(robinson): MergeFromString() should probably return None and be + # implemented in terms of a helper that returns the # of bytes read. Our + # deserialization routines would use the helper when recursively + # deserializing, but the end user would almost always just want the no-return + # MergeFromString(). + + def MergeFromString(self, serialized): + """Merges serialized protocol buffer data into this message. + + When we find a field in |serialized| that is already present + in this message: + - If it's a "repeated" field, we append to the end of our list. + - Else, if it's a scalar, we overwrite our field. + - Else, (it's a nonrepeated composite), we recursively merge + into the existing composite. + + TODO(robinson): Document handling of unknown fields. + + Args: + serialized: Any object that allows us to call buffer(serialized) + to access a string of bytes using the buffer interface. + + TODO(robinson): When we switch to a helper, this will return None. + + Returns: + The number of bytes read from |serialized|. + For non-group messages, this will always be len(serialized), + but for messages which are actually groups, this will + generally be less than len(serialized), since we must + stop when we reach an END_GROUP tag. Note that if + we *do* stop because of an END_GROUP tag, the number + of bytes returned does not include the bytes + for the END_GROUP tag information. + """ + raise NotImplementedError + + def ParseFromString(self, serialized): + """Parse serialized protocol buffer data into this message. + + Like MergeFromString(), except we clear the object first and + do not return the value that MergeFromString returns. 
+ """ + self.Clear() + self.MergeFromString(serialized) + + def SerializeToString(self): + """Serializes the protocol message to a binary string. + + Returns: + A binary string representation of the message if all of the required + fields in the message are set (i.e. the message is initialized). + + Raises: + message.EncodeError if the message isn't initialized. + """ + raise NotImplementedError + + def SerializePartialToString(self): + """Serializes the protocol message to a binary string. + + This method is similar to SerializeToString but doesn't check if the + message is initialized. + + Returns: + A string representation of the partial message. + """ + raise NotImplementedError + + # TODO(robinson): Decide whether we like these better + # than auto-generated has_foo() and clear_foo() methods + # on the instances themselves. This way is less consistent + # with C++, but it makes reflection-type access easier and + # reduces the number of magically autogenerated things. + # + # TODO(robinson): Be sure to document (and test) exactly + # which field names are accepted here. Are we case-sensitive? + # What do we do with fields that share names with Python keywords + # like 'lambda' and 'yield'? + # + # nnorwitz says: + # """ + # Typically (in python), an underscore is appended to names that are + # keywords. So they would become lambda_ or yield_. + # """ + def ListFields(self): + """Returns a list of (FieldDescriptor, value) tuples for all + fields in the message which are not empty. A singular field is non-empty + if HasField() would return true, and a repeated field is non-empty if + it contains at least one element. The fields are ordered by field + number""" + raise NotImplementedError + + def HasField(self, field_name): + """Checks if a certain field is set for the message, or if any field inside + a oneof group is set. 
Note that if the field_name is not defined in the + message descriptor, ValueError will be raised.""" + raise NotImplementedError + + def ClearField(self, field_name): + """Clears the contents of a given field, or the field set inside a oneof + group. If the name neither refers to a defined field or oneof group, + ValueError is raised.""" + raise NotImplementedError + + def WhichOneof(self, oneof_group): + """Returns the name of the field that is set inside a oneof group, or + None if no field is set. If no group with the given name exists, ValueError + will be raised.""" + raise NotImplementedError + + def HasExtension(self, extension_handle): + raise NotImplementedError + + def ClearExtension(self, extension_handle): + raise NotImplementedError + + def ByteSize(self): + """Returns the serialized size of this message. + Recursively calls ByteSize() on all contained messages. + """ + raise NotImplementedError + + def _SetListener(self, message_listener): + """Internal method used by the protocol message implementation. + Clients should not call this directly. + + Sets a listener that this message will call on certain state transitions. + + The purpose of this method is to register back-edges from children to + parents at runtime, for the purpose of setting "has" bits and + byte-size-dirty bits in the parent and ancestor objects whenever a child or + descendant object is modified. + + If the client wants to disconnect this Message from the object tree, she + explicitly sets callback to None. + + If message_listener is None, unregisters any existing listener. Otherwise, + message_listener must implement the MessageListener interface in + internal/message_listener.py, and we discard any listener registered + via a previous _SetListener() call. 
+ """ + raise NotImplementedError + + def __getstate__(self): + """Support the pickle protocol.""" + return dict(serialized=self.SerializePartialToString()) + + def __setstate__(self, state): + """Support the pickle protocol.""" + self.__init__() + self.ParseFromString(state['serialized']) diff --git a/deps/google/protobuf/message_factory.py b/deps/google/protobuf/message_factory.py new file mode 100644 index 00000000..1b059d13 --- /dev/null +++ b/deps/google/protobuf/message_factory.py @@ -0,0 +1,147 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Provides a factory class for generating dynamic messages. + +The easiest way to use this class is if you have access to the FileDescriptor +protos containing the messages you want to create you can just do the following: + +message_classes = message_factory.GetMessages(iterable_of_file_descriptors) +my_proto_instance = message_classes['some.proto.package.MessageName']() +""" + +__author__ = 'matthewtoia@google.com (Matt Toia)' + +from google.protobuf import descriptor_pool +from google.protobuf import message +from google.protobuf import reflection + + +class MessageFactory(object): + """Factory for creating Proto2 messages from descriptors in a pool.""" + + def __init__(self, pool=None): + """Initializes a new factory.""" + self.pool = pool or descriptor_pool.DescriptorPool() + + # local cache of all classes built from protobuf descriptors + self._classes = {} + + def GetPrototype(self, descriptor): + """Builds a proto2 message class based on the passed in descriptor. + + Passing a descriptor with a fully qualified name matching a previous + invocation will cause the same class to be returned. + + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. 
+ """ + if descriptor.full_name not in self._classes: + descriptor_name = descriptor.name + if str is bytes: # PY2 + descriptor_name = descriptor.name.encode('ascii', 'ignore') + result_class = reflection.GeneratedProtocolMessageType( + descriptor_name, + (message.Message,), + {'DESCRIPTOR': descriptor, '__module__': None}) + # If module not set, it wrongly points to the reflection.py module. + self._classes[descriptor.full_name] = result_class + for field in descriptor.fields: + if field.message_type: + self.GetPrototype(field.message_type) + for extension in result_class.DESCRIPTOR.extensions: + if extension.containing_type.full_name not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type.full_name] + extended_class.RegisterExtension(extension) + return self._classes[descriptor.full_name] + + def GetMessages(self, files): + """Gets all the messages from a specified file. + + This will find and resolve dependencies, failing if the descriptor + pool cannot satisfy them. + + Args: + files: The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + """ + result = {} + for file_name in files: + file_desc = self.pool.FindFileByName(file_name) + for name, msg in file_desc.message_types_by_name.items(): + if file_desc.package: + full_name = '.'.join([file_desc.package, name]) + else: + full_name = msg.name + result[full_name] = self.GetPrototype( + self.pool.FindMessageTypeByName(full_name)) + + # While the extension FieldDescriptors are created by the descriptor pool, + # the python classes created in the factory need them to be registered + # explicitly, which is done below. 
+ # + # The call to RegisterExtension will specifically check if the + # extension was already registered on the object and either + # ignore the registration if the original was the same, or raise + # an error if they were different. + + for name, extension in file_desc.extensions_by_name.items(): + if extension.containing_type.full_name not in self._classes: + self.GetPrototype(extension.containing_type) + extended_class = self._classes[extension.containing_type.full_name] + extended_class.RegisterExtension(extension) + return result + + +_FACTORY = MessageFactory() + + +def GetMessages(file_protos): + """Builds a dictionary of all the messages available in a set of files. + + Args: + file_protos: A sequence of file protos to build messages out of. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + """ + for file_proto in file_protos: + _FACTORY.pool.Add(file_proto) + return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/deps/google/protobuf/proto_builder.py b/deps/google/protobuf/proto_builder.py new file mode 100644 index 00000000..736caed3 --- /dev/null +++ b/deps/google/protobuf/proto_builder.py @@ -0,0 +1,130 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. 
+# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Dynamic Protobuf class creator.""" + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict #PY26 +import hashlib +import os + +from google.protobuf import descriptor_pb2 +from google.protobuf import message_factory + + +def _GetMessageFromFactory(factory, full_name): + """Get a proto class from the MessageFactory by name. + + Args: + factory: a MessageFactory instance. + full_name: str, the fully qualified name of the proto type. + Returns: + A class, for the type identified by full_name. + Raises: + KeyError, if the proto is not found in the factory's descriptor pool. + """ + proto_descriptor = factory.pool.FindMessageTypeByName(full_name) + proto_cls = factory.GetPrototype(proto_descriptor) + return proto_cls + + +def MakeSimpleProtoClass(fields, full_name=None, pool=None): + """Create a Protobuf class whose fields are basic types. + + Note: this doesn't validate field names! 
+ + Args: + fields: dict of {name: field_type} mappings for each field in the proto. If + this is an OrderedDict the order will be maintained, otherwise the + fields will be sorted by name. + full_name: optional str, the fully-qualified name of the proto type. + pool: optional DescriptorPool instance. + Returns: + a class, the new protobuf class with a FileDescriptor. + """ + factory = message_factory.MessageFactory(pool=pool) + + if full_name is not None: + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # Get a list of (name, field_type) tuples from the fields dict. If fields was + # an OrderedDict we keep the order, but otherwise we sort the field to ensure + # consistent ordering. + field_items = fields.items() + if not isinstance(fields, OrderedDict): + field_items = sorted(field_items) + + # Use a consistent file name that is unlikely to conflict with any imported + # proto files. + fields_hash = hashlib.sha1() + for f_name, f_type in field_items: + fields_hash.update(f_name.encode('utf-8')) + fields_hash.update(str(f_type).encode('utf-8')) + proto_file_name = fields_hash.hexdigest() + '.proto' + + # If the proto is anonymous, use the same hash to name it. + if full_name is None: + full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + + fields_hash.hexdigest()) + try: + proto_cls = _GetMessageFromFactory(factory, full_name) + return proto_cls + except KeyError: + # The factory's DescriptorPool doesn't know about this class yet. + pass + + # This is the first time we see this proto: add a new descriptor to the pool. 
+ factory.pool.Add( + _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) + return _GetMessageFromFactory(factory, full_name) + + +def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): + """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" + package, name = full_name.rsplit('.', 1) + file_proto = descriptor_pb2.FileDescriptorProto() + file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) + file_proto.package = package + desc_proto = file_proto.message_type.add() + desc_proto.name = name + for f_number, (f_name, f_type) in enumerate(field_items, 1): + field_proto = desc_proto.field.add() + field_proto.name = f_name + field_proto.number = f_number + field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL + field_proto.type = f_type + return file_proto diff --git a/deps/google/protobuf/pyext/__init__.py b/deps/google/protobuf/pyext/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/deps/google/protobuf/pyext/__pycache__/__init__.cpython-34.pyc b/deps/google/protobuf/pyext/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..53d9eb82 Binary files /dev/null and b/deps/google/protobuf/pyext/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/google/protobuf/pyext/__pycache__/cpp_message.cpython-34.pyc b/deps/google/protobuf/pyext/__pycache__/cpp_message.cpython-34.pyc new file mode 100644 index 00000000..638d57e8 Binary files /dev/null and b/deps/google/protobuf/pyext/__pycache__/cpp_message.cpython-34.pyc differ diff --git a/deps/google/protobuf/pyext/__pycache__/python_pb2.cpython-34.pyc b/deps/google/protobuf/pyext/__pycache__/python_pb2.cpython-34.pyc new file mode 100644 index 00000000..1e26c2bd Binary files /dev/null and b/deps/google/protobuf/pyext/__pycache__/python_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/pyext/cpp_message.py b/deps/google/protobuf/pyext/cpp_message.py new file mode 100644 index 
00000000..b215211e --- /dev/null +++ b/deps/google/protobuf/pyext/cpp_message.py @@ -0,0 +1,65 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Protocol message implementation hooks for C++ implementation. + +Contains helper functions used to create protocol message classes from +Descriptor objects at runtime backed by the protocol buffer C++ API. 
+""" + +__author__ = 'tibell@google.com (Johan Tibell)' + +from google.protobuf.pyext import _message + + +class GeneratedProtocolMessageType(_message.MessageMeta): + + """Metaclass for protocol message classes created at runtime from Descriptors. + + The protocol compiler currently uses this metaclass to create protocol + message classes at runtime. Clients can also manually create their own + classes at runtime, as in this example: + + mydescriptor = Descriptor(.....) + class MyProtoClass(Message): + __metaclass__ = GeneratedProtocolMessageType + DESCRIPTOR = mydescriptor + myproto_instance = MyProtoClass() + myproto.foo_field = 23 + ... + + The above example will not work for nested types. If you wish to include them, + use reflection.MakeClass() instead of manually instantiating the class in + order to create the appropriate class structure. + """ + + # Must be consistent with the protocol-compiler code in + # proto2/compiler/internal/generator.*. + _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/deps/google/protobuf/pyext/python_pb2.py b/deps/google/protobuf/pyext/python_pb2.py new file mode 100644 index 00000000..3750682a --- /dev/null +++ b/deps/google/protobuf/pyext/python_pb2.py @@ -0,0 +1,234 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/pyext/python.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/pyext/python.proto', + package='google.protobuf.python.internal', + syntax='proto2', + serialized_pb=_b('\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + +OPTIONAL_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 1 +optional_nested_message_extension = _descriptor.FieldDescriptor( + name='optional_nested_message_extension', 
full_name='google.protobuf.python.internal.optional_nested_message_extension', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 2 +repeated_nested_message_extension = _descriptor.FieldDescriptor( + name='repeated_nested_message_extension', full_name='google.protobuf.python.internal.repeated_nested_message_extension', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + + +_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bb', full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage.bb', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cc', full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage.cc', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=300, + serialized_end=388, +) + +_TESTALLTYPES = _descriptor.Descriptor( + name='TestAllTypes', + full_name='google.protobuf.python.internal.TestAllTypes', + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='repeated_nested_message', full_name='google.protobuf.python.internal.TestAllTypes.repeated_nested_message', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_nested_message', full_name='google.protobuf.python.internal.TestAllTypes.optional_nested_message', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_int32', full_name='google.protobuf.python.internal.TestAllTypes.optional_int32', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTALLTYPES_NESTEDMESSAGE, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=72, + serialized_end=388, +) + + +_FOREIGNMESSAGE = _descriptor.Descriptor( + name='ForeignMessage', + full_name='google.protobuf.python.internal.ForeignMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='c', full_name='google.protobuf.python.internal.ForeignMessage.c', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='d', 
full_name='google.protobuf.python.internal.ForeignMessage.d', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=390, + serialized_end=428, +) + + +_TESTALLEXTENSIONS = _descriptor.Descriptor( + name='TestAllExtensions', + full_name='google.protobuf.python.internal.TestAllExtensions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1, 536870912), ], + oneofs=[ + ], + serialized_start=430, + serialized_end=459, +) + +_TESTALLTYPES_NESTEDMESSAGE.fields_by_name['cc'].message_type = _FOREIGNMESSAGE +_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES +_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES +DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE +DESCRIPTOR.message_types_by_name['TestAllExtensions'] = _TESTALLEXTENSIONS +DESCRIPTOR.extensions_by_name['optional_nested_message_extension'] = optional_nested_message_extension +DESCRIPTOR.extensions_by_name['repeated_nested_message_extension'] = repeated_nested_message_extension + +TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE, + __module__ = 'google.protobuf.pyext.python_pb2' + # 
@@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllTypes.NestedMessage) + )) + , + DESCRIPTOR = _TESTALLTYPES, + __module__ = 'google.protobuf.pyext.python_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllTypes) + )) +_sym_db.RegisterMessage(TestAllTypes) +_sym_db.RegisterMessage(TestAllTypes.NestedMessage) + +ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict( + DESCRIPTOR = _FOREIGNMESSAGE, + __module__ = 'google.protobuf.pyext.python_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.ForeignMessage) + )) +_sym_db.RegisterMessage(ForeignMessage) + +TestAllExtensions = _reflection.GeneratedProtocolMessageType('TestAllExtensions', (_message.Message,), dict( + DESCRIPTOR = _TESTALLEXTENSIONS, + __module__ = 'google.protobuf.pyext.python_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllExtensions) + )) +_sym_db.RegisterMessage(TestAllExtensions) + +optional_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE +TestAllExtensions.RegisterExtension(optional_nested_message_extension) +repeated_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE +TestAllExtensions.RegisterExtension(repeated_nested_message_extension) + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/reflection.py b/deps/google/protobuf/reflection.py new file mode 100644 index 00000000..0c757264 --- /dev/null +++ b/deps/google/protobuf/reflection.py @@ -0,0 +1,120 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
+# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# This code is meant to work on Python 2.4 and above only. + +"""Contains a metaclass and helper functions used to create +protocol message classes from Descriptor objects at runtime. + +Recall that a metaclass is the "type" of a class. +(A class is to a metaclass what an instance is to a class.) 
+ +In this case, we use the GeneratedProtocolMessageType metaclass +to inject all the useful functionality into the classes +output by the protocol compiler at compile-time. + +The upshot of all this is that the real implementation +details for ALL pure-Python protocol buffers are *here in +this file*. +""" + +__author__ = 'robinson@google.com (Will Robinson)' + + +from google.protobuf.internal import api_implementation +from google.protobuf import message + + +if api_implementation.Type() == 'cpp': + from google.protobuf.pyext import cpp_message as message_impl +else: + from google.protobuf.internal import python_message as message_impl + +# The type of all Message classes. +# Part of the public interface. +# +# Used by generated files, but clients can also use it at runtime: +# mydescriptor = pool.FindDescriptor(.....) +# class MyProtoClass(Message): +# __metaclass__ = GeneratedProtocolMessageType +# DESCRIPTOR = mydescriptor +GeneratedProtocolMessageType = message_impl.GeneratedProtocolMessageType + + +def ParseMessage(descriptor, byte_str): + """Generate a new Message instance from this Descriptor and a byte string. + + Args: + descriptor: Protobuf Descriptor object + byte_str: Serialized protocol buffer byte string + + Returns: + Newly created protobuf Message object. + """ + result_class = MakeClass(descriptor) + new_msg = result_class() + new_msg.ParseFromString(byte_str) + return new_msg + + +def MakeClass(descriptor): + """Construct a class object for a protobuf described by descriptor. + + Composite descriptors are handled by defining the new class as a member of the + parent class, recursing as deep as necessary. 
+ This is the dynamic equivalent to: + + class Parent(message.Message): + __metaclass__ = GeneratedProtocolMessageType + DESCRIPTOR = descriptor + class Child(message.Message): + __metaclass__ = GeneratedProtocolMessageType + DESCRIPTOR = descriptor.nested_types[0] + + Sample usage: + file_descriptor = descriptor_pb2.FileDescriptorProto() + file_descriptor.ParseFromString(proto2_string) + msg_descriptor = descriptor.MakeDescriptor(file_descriptor.message_type[0]) + msg_class = reflection.MakeClass(msg_descriptor) + msg = msg_class() + + Args: + descriptor: A descriptor.Descriptor object describing the protobuf. + Returns: + The Message class object described by the descriptor. + """ + attributes = {} + for name, nested_type in descriptor.nested_types_by_name.items(): + attributes[name] = MakeClass(nested_type) + + attributes[GeneratedProtocolMessageType._DESCRIPTOR_KEY] = descriptor + + return GeneratedProtocolMessageType(str(descriptor.name), (message.Message,), + attributes) diff --git a/deps/google/protobuf/service.py b/deps/google/protobuf/service.py new file mode 100644 index 00000000..9e00de70 --- /dev/null +++ b/deps/google/protobuf/service.py @@ -0,0 +1,226 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""DEPRECATED: Declares the RPC service interfaces. + +This module declares the abstract interfaces underlying proto2 RPC +services. These are intended to be independent of any particular RPC +implementation, so that proto2 services can be used on top of a variety +of implementations. Starting with version 2.3.0, RPC implementations should +not try to build on these, but should instead provide code generator plugins +which generate code specific to the particular RPC implementation. This way +the generated code can be more appropriate for the implementation in use +and can avoid unnecessary layers of indirection. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class RpcException(Exception): + """Exception raised on failed blocking RPC method call.""" + pass + + +class Service(object): + + """Abstract base interface for protocol-buffer-based RPC services. + + Services themselves are abstract classes (implemented either by servers or as + stubs), but they subclass this base interface. 
The methods of this + interface can be used to call the methods of the service without knowing + its exact type at compile time (analogous to the Message interface). + """ + + def GetDescriptor(): + """Retrieves this service's descriptor.""" + raise NotImplementedError + + def CallMethod(self, method_descriptor, rpc_controller, + request, done): + """Calls a method of the service specified by method_descriptor. + + If "done" is None then the call is blocking and the response + message will be returned directly. Otherwise the call is asynchronous + and "done" will later be called with the response value. + + In the blocking case, RpcException will be raised on error. + + Preconditions: + * method_descriptor.service == GetDescriptor + * request is of the exact same classes as returned by + GetRequestClass(method). + * After the call has started, the request must not be modified. + * "rpc_controller" is of the correct type for the RPC implementation being + used by this Service. For stubs, the "correct type" depends on the + RpcChannel which the stub is using. + + Postconditions: + * "done" will be called when the method is complete. This may be + before CallMethod() returns or it may be at some point in the future. + * If the RPC failed, the response value passed to "done" will be None. + Further details about the failure can be found by querying the + RpcController. + """ + raise NotImplementedError + + def GetRequestClass(self, method_descriptor): + """Returns the class of the request message for the specified method. + + CallMethod() requires that the request is of a particular subclass of + Message. GetRequestClass() gets the default instance of this required + type. 
+ + Example: + method = service.GetDescriptor().FindMethodByName("Foo") + request = stub.GetRequestClass(method)() + request.ParseFromString(input) + service.CallMethod(method, request, callback) + """ + raise NotImplementedError + + def GetResponseClass(self, method_descriptor): + """Returns the class of the response message for the specified method. + + This method isn't really needed, as the RpcChannel's CallMethod constructs + the response protocol message. It's provided anyway in case it is useful + for the caller to know the response type in advance. + """ + raise NotImplementedError + + +class RpcController(object): + + """An RpcController mediates a single method call. + + The primary purpose of the controller is to provide a way to manipulate + settings specific to the RPC implementation and to find out about RPC-level + errors. The methods provided by the RpcController interface are intended + to be a "least common denominator" set of features which we expect all + implementations to support. Specific implementations may provide more + advanced features (e.g. deadline propagation). + """ + + # Client-side methods below + + def Reset(self): + """Resets the RpcController to its initial state. + + After the RpcController has been reset, it may be reused in + a new call. Must not be called while an RPC is in progress. + """ + raise NotImplementedError + + def Failed(self): + """Returns true if the call failed. + + After a call has finished, returns true if the call failed. The possible + reasons for failure depend on the RPC implementation. Failed() must not + be called before a call has finished. If Failed() returns true, the + contents of the response message are undefined. + """ + raise NotImplementedError + + def ErrorText(self): + """If Failed is true, returns a human-readable description of the error.""" + raise NotImplementedError + + def StartCancel(self): + """Initiate cancellation. 
+ + Advises the RPC system that the caller desires that the RPC call be + canceled. The RPC system may cancel it immediately, may wait awhile and + then cancel it, or may not even cancel the call at all. If the call is + canceled, the "done" callback will still be called and the RpcController + will indicate that the call failed at that time. + """ + raise NotImplementedError + + # Server-side methods below + + def SetFailed(self, reason): + """Sets a failure reason. + + Causes Failed() to return true on the client side. "reason" will be + incorporated into the message returned by ErrorText(). If you find + you need to return machine-readable information about failures, you + should incorporate it into your response protocol buffer and should + NOT call SetFailed(). + """ + raise NotImplementedError + + def IsCanceled(self): + """Checks if the client cancelled the RPC. + + If true, indicates that the client canceled the RPC, so the server may + as well give up on replying to it. The server should still call the + final "done" callback. + """ + raise NotImplementedError + + def NotifyOnCancel(self, callback): + """Sets a callback to invoke on cancel. + + Asks that the given callback be called when the RPC is canceled. The + callback will always be called exactly once. If the RPC completes without + being canceled, the callback will be called after completion. If the RPC + has already been canceled when NotifyOnCancel() is called, the callback + will be called immediately. + + NotifyOnCancel() must be called no more than once per request. + """ + raise NotImplementedError + + +class RpcChannel(object): + + """Abstract interface for an RPC channel. + + An RpcChannel represents a communication line to a service which can be used + to call that service's methods. The service may be running on another + machine. Normally, you should not use an RpcChannel directly, but instead + construct a stub {@link Service} wrapping it. 
Example: + + Example: + RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") + RpcController controller = rpcImpl.Controller() + MyService service = MyService_Stub(channel) + service.MyMethod(controller, request, callback) + """ + + def CallMethod(self, method_descriptor, rpc_controller, + request, response_class, done): + """Calls the method identified by the descriptor. + + Call the given method of the remote service. The signature of this + procedure looks the same as Service.CallMethod(), but the requirements + are less strict in one important way: the request object doesn't have to + be of any specific class as long as its descriptor is method.input_type. + """ + raise NotImplementedError diff --git a/deps/google/protobuf/service_reflection.py b/deps/google/protobuf/service_reflection.py new file mode 100644 index 00000000..1c3636af --- /dev/null +++ b/deps/google/protobuf/service_reflection.py @@ -0,0 +1,284 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains metaclasses used to create protocol service and service stub +classes from ServiceDescriptor objects at runtime. + +The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to +inject all useful functionality into the classes output by the protocol +compiler at compile-time. +""" + +__author__ = 'petar@google.com (Petar Petrov)' + + +class GeneratedServiceType(type): + + """Metaclass for service classes created at runtime from ServiceDescriptors. + + Implementations for all methods described in the Service class are added here + by this class. We also create properties to allow getting/setting all fields + in the protocol message. + + The protocol compiler currently uses this metaclass to create protocol service + classes at runtime. Clients can also manually create their own classes at + runtime, as in this example: + + mydescriptor = ServiceDescriptor(.....) + class MyProtoService(service.Service): + __metaclass__ = GeneratedServiceType + DESCRIPTOR = mydescriptor + myservice_instance = MyProtoService() + ... + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service class. 
+ + Args: + name: Name of the class (ignored, but required by the metaclass + protocol). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service class is subclassed. + if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: + return + descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] + service_builder = _ServiceBuilder(descriptor) + service_builder.BuildService(cls) + + +class GeneratedServiceStubType(GeneratedServiceType): + + """Metaclass for service stubs created at runtime from ServiceDescriptors. + + This class has similar responsibilities as GeneratedServiceType, except that + it creates the service stub classes. + """ + + _DESCRIPTOR_KEY = 'DESCRIPTOR' + + def __init__(cls, name, bases, dictionary): + """Creates a message service stub class. + + Args: + name: Name of the class (ignored, here). + bases: Base classes of the class being constructed. + dictionary: The class dictionary of the class being constructed. + dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object + describing this protocol service type. + """ + super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) + # Don't do anything if this class doesn't have a descriptor. This happens + # when a service stub is subclassed. + if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: + return + descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] + service_stub_builder = _ServiceStubBuilder(descriptor) + service_stub_builder.BuildServiceStub(cls) + + +class _ServiceBuilder(object): + + """This class constructs a protocol service class using a service descriptor. 
+ + Given a service descriptor, this class constructs a class that represents + the specified service descriptor. One service builder instance constructs + exactly one service class. That means all instances of that class share the + same builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + service class. + """ + self.descriptor = service_descriptor + + def BuildService(self, cls): + """Constructs the service class. + + Args: + cls: The class that will be constructed. + """ + + # CallMethod needs to operate with an instance of the Service class. This + # internal wrapper function exists only to be able to pass the service + # instance to the method that does the real CallMethod work. + def _WrapCallMethod(srvc, method_descriptor, + rpc_controller, request, callback): + return self._CallMethod(srvc, method_descriptor, + rpc_controller, request, callback) + self.cls = cls + cls.CallMethod = _WrapCallMethod + cls.GetDescriptor = staticmethod(lambda: self.descriptor) + cls.GetDescriptor.__doc__ = "Returns the service descriptor." + cls.GetRequestClass = self._GetRequestClass + cls.GetResponseClass = self._GetResponseClass + for method in self.descriptor.methods: + setattr(cls, method.name, self._GenerateNonImplementedMethod(method)) + + def _CallMethod(self, srvc, method_descriptor, + rpc_controller, request, callback): + """Calls the method described by a given method descriptor. + + Args: + srvc: Instance of the service for which this method is called. + method_descriptor: Descriptor that represent the method to call. + rpc_controller: RPC controller to use for this method's execution. + request: Request protocol message. + callback: A callback to invoke after the method has completed. 
+ """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'CallMethod() given method descriptor for wrong service type.') + method = getattr(srvc, method_descriptor.name) + return method(rpc_controller, request, callback) + + def _GetRequestClass(self, method_descriptor): + """Returns the class of the request protocol message. + + Args: + method_descriptor: Descriptor of the method for which to return the + request protocol message class. + + Returns: + A class that represents the input protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetRequestClass() given method descriptor for wrong service type.') + return method_descriptor.input_type._concrete_class + + def _GetResponseClass(self, method_descriptor): + """Returns the class of the response protocol message. + + Args: + method_descriptor: Descriptor of the method for which to return the + response protocol message class. + + Returns: + A class that represents the output protocol message of the specified + method. + """ + if method_descriptor.containing_service != self.descriptor: + raise RuntimeError( + 'GetResponseClass() given method descriptor for wrong service type.') + return method_descriptor.output_type._concrete_class + + def _GenerateNonImplementedMethod(self, method): + """Generates and returns a method that can be set for a service methods. + + Args: + method: Descriptor of the service method for which a method is to be + generated. + + Returns: + A method that can be added to the service class. + """ + return lambda inst, rpc_controller, request, callback: ( + self._NonImplementedMethod(method.name, rpc_controller, callback)) + + def _NonImplementedMethod(self, method_name, rpc_controller, callback): + """The body of all methods in the generated service class. + + Args: + method_name: Name of the method being executed. + rpc_controller: RPC controller used to execute this method. 
+ callback: A callback which will be invoked when the method finishes. + """ + rpc_controller.SetFailed('Method %s not implemented.' % method_name) + callback(None) + + +class _ServiceStubBuilder(object): + + """Constructs a protocol service stub class using a service descriptor. + + Given a service descriptor, this class constructs a suitable stub class. + A stub is just a type-safe wrapper around an RpcChannel which emulates a + local implementation of the service. + + One service stub builder instance constructs exactly one class. It means all + instances of that class share the same service stub builder. + """ + + def __init__(self, service_descriptor): + """Initializes an instance of the service stub class builder. + + Args: + service_descriptor: ServiceDescriptor to use when constructing the + stub class. + """ + self.descriptor = service_descriptor + + def BuildServiceStub(self, cls): + """Constructs the stub class. + + Args: + cls: The class that will be constructed. + """ + + def _ServiceStubInit(stub, rpc_channel): + stub.rpc_channel = rpc_channel + self.cls = cls + cls.__init__ = _ServiceStubInit + for method in self.descriptor.methods: + setattr(cls, method.name, self._GenerateStubMethod(method)) + + def _GenerateStubMethod(self, method): + return (lambda inst, rpc_controller, request, callback=None: + self._StubMethod(inst, method, rpc_controller, request, callback)) + + def _StubMethod(self, stub, method_descriptor, + rpc_controller, request, callback): + """The body of all service methods in the generated stub class. + + Args: + stub: Stub instance. + method_descriptor: Descriptor of the invoked method. + rpc_controller: Rpc controller to execute the method. + request: Request protocol message. + callback: A callback to execute when the method finishes. + Returns: + Response message (in case of blocking call). 
+ """ + return stub.rpc_channel.CallMethod( + method_descriptor, rpc_controller, request, + method_descriptor.output_type._concrete_class, callback) diff --git a/deps/google/protobuf/source_context_pb2.py b/deps/google/protobuf/source_context_pb2.py new file mode 100644 index 00000000..65272945 --- /dev/null +++ b/deps/google/protobuf/source_context_pb2.py @@ -0,0 +1,71 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/source_context.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/source_context.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tBU\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_SOURCECONTEXT = _descriptor.Descriptor( + name='SourceContext', + full_name='google.protobuf.SourceContext', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='file_name', full_name='google.protobuf.SourceContext.file_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, 
+ is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=57, + serialized_end=91, +) + +DESCRIPTOR.message_types_by_name['SourceContext'] = _SOURCECONTEXT + +SourceContext = _reflection.GeneratedProtocolMessageType('SourceContext', (_message.Message,), dict( + DESCRIPTOR = _SOURCECONTEXT, + __module__ = 'google.protobuf.source_context_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.SourceContext) + )) +_sym_db.RegisterMessage(SourceContext) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\022SourceContextProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/struct_pb2.py b/deps/google/protobuf/struct_pb2.py new file mode 100644 index 00000000..922b663c --- /dev/null +++ b/deps/google/protobuf/struct_pb2.py @@ -0,0 +1,281 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/struct.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/struct.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42N\n\x13\x63om.google.protobufB\x0bStructProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_NULLVALUE = _descriptor.EnumDescriptor( + name='NullValue', + full_name='google.protobuf.NullValue', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NULL_VALUE', index=0, number=0, + options=None, + type=None), + ], + containing_type=None, 
+ options=None, + serialized_start=474, + serialized_end=501, +) +_sym_db.RegisterEnumDescriptor(_NULLVALUE) + +NullValue = enum_type_wrapper.EnumTypeWrapper(_NULLVALUE) +NULL_VALUE = 0 + + + +_STRUCT_FIELDSENTRY = _descriptor.Descriptor( + name='FieldsEntry', + full_name='google.protobuf.Struct.FieldsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.protobuf.Struct.FieldsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.Struct.FieldsEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=113, + serialized_end=182, +) + +_STRUCT = _descriptor.Descriptor( + name='Struct', + full_name='google.protobuf.Struct', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fields', full_name='google.protobuf.Struct.fields', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_STRUCT_FIELDSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=50, + 
serialized_end=182, +) + + +_VALUE = _descriptor.Descriptor( + name='Value', + full_name='google.protobuf.Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='null_value', full_name='google.protobuf.Value.null_value', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='number_value', full_name='google.protobuf.Value.number_value', index=1, + number=2, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.protobuf.Value.string_value', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bool_value', full_name='google.protobuf.Value.bool_value', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='struct_value', full_name='google.protobuf.Value.struct_value', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='list_value', full_name='google.protobuf.Value.list_value', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='kind', full_name='google.protobuf.Value.kind', + index=0, containing_type=None, fields=[]), + ], + serialized_start=185, + serialized_end=419, +) + + +_LISTVALUE = _descriptor.Descriptor( + name='ListValue', + full_name='google.protobuf.ListValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='values', full_name='google.protobuf.ListValue.values', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=421, + serialized_end=472, +) + +_STRUCT_FIELDSENTRY.fields_by_name['value'].message_type = _VALUE +_STRUCT_FIELDSENTRY.containing_type = _STRUCT +_STRUCT.fields_by_name['fields'].message_type = _STRUCT_FIELDSENTRY +_VALUE.fields_by_name['null_value'].enum_type = _NULLVALUE +_VALUE.fields_by_name['struct_value'].message_type = _STRUCT +_VALUE.fields_by_name['list_value'].message_type = _LISTVALUE +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['null_value']) +_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['number_value']) +_VALUE.fields_by_name['number_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['string_value']) 
+_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['bool_value']) +_VALUE.fields_by_name['bool_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['struct_value']) +_VALUE.fields_by_name['struct_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['list_value']) +_VALUE.fields_by_name['list_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_LISTVALUE.fields_by_name['values'].message_type = _VALUE +DESCRIPTOR.message_types_by_name['Struct'] = _STRUCT +DESCRIPTOR.message_types_by_name['Value'] = _VALUE +DESCRIPTOR.message_types_by_name['ListValue'] = _LISTVALUE +DESCRIPTOR.enum_types_by_name['NullValue'] = _NULLVALUE + +Struct = _reflection.GeneratedProtocolMessageType('Struct', (_message.Message,), dict( + + FieldsEntry = _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), dict( + DESCRIPTOR = _STRUCT_FIELDSENTRY, + __module__ = 'google.protobuf.struct_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Struct.FieldsEntry) + )) + , + DESCRIPTOR = _STRUCT, + __module__ = 'google.protobuf.struct_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Struct) + )) +_sym_db.RegisterMessage(Struct) +_sym_db.RegisterMessage(Struct.FieldsEntry) + +Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( + DESCRIPTOR = _VALUE, + __module__ = 'google.protobuf.struct_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Value) + )) +_sym_db.RegisterMessage(Value) + +ListValue = _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), dict( + DESCRIPTOR = _LISTVALUE, + __module__ = 'google.protobuf.struct_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.ListValue) + )) +_sym_db.RegisterMessage(ListValue) + + 
+DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\013StructProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +_STRUCT_FIELDSENTRY.has_options = True +_STRUCT_FIELDSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/symbol_database.py b/deps/google/protobuf/symbol_database.py new file mode 100644 index 00000000..87760f26 --- /dev/null +++ b/deps/google/protobuf/symbol_database.py @@ -0,0 +1,185 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""A database of Python protocol buffer generated symbols. + +SymbolDatabase makes it easy to create new instances of a registered type, given +only the type's protocol buffer symbol name. Once all symbols are registered, +they can be accessed using either the MessageFactory interface which +SymbolDatabase exposes, or the DescriptorPool interface of the underlying +pool. + +Example usage: + + db = symbol_database.SymbolDatabase() + + # Register symbols of interest, from one or multiple files. + db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) + db.RegisterMessage(my_proto_pb2.MyMessage) + db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) + + # The database can be used as a MessageFactory, to generate types based on + # their name: + types = db.GetMessages(['my_proto.proto']) + my_message_instance = types['MyMessage']() + + # The database's underlying descriptor pool can be queried, so it's not + # necessary to know a type's filename to be able to generate it: + filename = db.pool.FindFileContainingSymbol('MyMessage') + my_message_instance = db.GetMessages([filename])['MyMessage']() + + # This functionality is also provided directly via a convenience method: + my_message_instance = db.GetSymbol('MyMessage')() +""" + + +from google.protobuf import descriptor_pool + + +class SymbolDatabase(object): + """A database of Python generated symbols. + + SymbolDatabase also models message_factory.MessageFactory. 
+ + The symbol database can be used to keep a global registry of all protocol + buffer types used within a program. + """ + + def __init__(self, pool=None): + """Constructor.""" + + self._symbols = {} + self._symbols_by_file = {} + self.pool = pool or descriptor_pool.Default() + + def RegisterMessage(self, message): + """Registers the given message type in the local database. + + Args: + message: a message.Message, to be registered. + + Returns: + The provided message. + """ + + desc = message.DESCRIPTOR + self._symbols[desc.full_name] = message + if desc.file.name not in self._symbols_by_file: + self._symbols_by_file[desc.file.name] = {} + self._symbols_by_file[desc.file.name][desc.full_name] = message + self.pool.AddDescriptor(desc) + return message + + def RegisterEnumDescriptor(self, enum_descriptor): + """Registers the given enum descriptor in the local database. + + Args: + enum_descriptor: a descriptor.EnumDescriptor. + + Returns: + The provided descriptor. + """ + self.pool.AddEnumDescriptor(enum_descriptor) + return enum_descriptor + + def RegisterFileDescriptor(self, file_descriptor): + """Registers the given file descriptor in the local database. + + Args: + file_descriptor: a descriptor.FileDescriptor. + + Returns: + The provided descriptor. + """ + self.pool.AddFileDescriptor(file_descriptor) + + def GetSymbol(self, symbol): + """Tries to find a symbol in the local database. + + Currently, this method only returns message.Message instances, however, if + may be extended in future to support other symbol types. + + Args: + symbol: A str, a protocol buffer symbol. + + Returns: + A Python class corresponding to the symbol. + + Raises: + KeyError: if the symbol could not be found. + """ + + return self._symbols[symbol] + + def GetPrototype(self, descriptor): + """Builds a proto2 message class based on the passed in descriptor. + + Passing a descriptor with a fully qualified name matching a previous + invocation will cause the same class to be returned. 
+ + Args: + descriptor: The descriptor to build from. + + Returns: + A class describing the passed in descriptor. + """ + + return self.GetSymbol(descriptor.full_name) + + def GetMessages(self, files): + """Gets all the messages from a specified file. + + This will find and resolve dependencies, failing if they are not registered + in the symbol database. + + + Args: + files: The file names to extract messages from. + + Returns: + A dictionary mapping proto names to the message classes. This will include + any dependent messages as well as any messages defined in the same file as + a specified message. + + Raises: + KeyError: if a file could not be found. + """ + + result = {} + for f in files: + result.update(self._symbols_by_file[f]) + return result + +_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) + + +def Default(): + """Returns the default SymbolDatabase.""" + return _DEFAULT diff --git a/deps/google/protobuf/text_encoding.py b/deps/google/protobuf/text_encoding.py new file mode 100644 index 00000000..98995638 --- /dev/null +++ b/deps/google/protobuf/text_encoding.py @@ -0,0 +1,107 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Encoding related utilities.""" +import re + +import six + +# Lookup table for utf8 +_cescape_utf8_to_str = [chr(i) for i in range(0, 256)] +_cescape_utf8_to_str[9] = r'\t' # optional escape +_cescape_utf8_to_str[10] = r'\n' # optional escape +_cescape_utf8_to_str[13] = r'\r' # optional escape +_cescape_utf8_to_str[39] = r"\'" # optional escape + +_cescape_utf8_to_str[34] = r'\"' # necessary escape +_cescape_utf8_to_str[92] = r'\\' # necessary escape + +# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) +_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + + [chr(i) for i in range(32, 127)] + + [r'\%03o' % i for i in range(127, 256)]) +_cescape_byte_to_str[9] = r'\t' # optional escape +_cescape_byte_to_str[10] = r'\n' # optional escape +_cescape_byte_to_str[13] = r'\r' # optional escape +_cescape_byte_to_str[39] = r"\'" # optional escape + +_cescape_byte_to_str[34] = r'\"' # necessary escape +_cescape_byte_to_str[92] = r'\\' # necessary escape + + +def CEscape(text, as_utf8): + """Escape a bytes string for use in an ascii protocol buffer. 
+ + text.encode('string_escape') does not seem to satisfy our needs as it + encodes unprintable characters using two-digit hex escapes whereas our + C++ unescaping function allows hex escapes to be any length. So, + "\0011".encode('string_escape') ends up being "\\x011", which will be + decoded in C++ as a single-character string with char code 0x11. + + Args: + text: A byte string to be escaped + as_utf8: Specifies if result should be returned in UTF-8 encoding + Returns: + Escaped string + """ + # PY3 hack: make Ord work for str and bytes: + # //platforms/networking/data uses unicode here, hence basestring. + Ord = ord if isinstance(text, six.string_types) else lambda x: x + if as_utf8: + return ''.join(_cescape_utf8_to_str[Ord(c)] for c in text) + return ''.join(_cescape_byte_to_str[Ord(c)] for c in text) + + +_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') +_cescape_highbit_to_str = ([chr(i) for i in range(0, 127)] + + [r'\%03o' % i for i in range(127, 256)]) + + +def CUnescape(text): + """Unescape a text string with C-style escape sequences to UTF-8 bytes.""" + + def ReplaceHex(m): + # Only replace the match if the number of leading back slashes is odd. i.e. + # the slash itself is not escaped. + if len(m.group(1)) & 1: + return m.group(1) + 'x0' + m.group(2) + return m.group(0) + + # This is required because the 'string_escape' encoding doesn't + # allow single-digit hex escapes (like '\xf'). + result = _CUNESCAPE_HEX.sub(ReplaceHex, text) + + if str is bytes: # PY2 + return result.decode('string_escape') + result = ''.join(_cescape_highbit_to_str[ord(c)] for c in result) + return (result.encode('ascii') # Make it bytes to allow decode. + .decode('unicode_escape') + # Make it bytes again to return the proper type. 
+ .encode('raw_unicode_escape')) diff --git a/deps/google/protobuf/text_format.py b/deps/google/protobuf/text_format.py new file mode 100644 index 00000000..8d256076 --- /dev/null +++ b/deps/google/protobuf/text_format.py @@ -0,0 +1,1112 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Contains routines for printing protocol messages in text format. 
+ +Simple usage example: + + # Create a proto object and serialize it to a text proto string. + message = my_proto_pb2.MyMessage(foo='bar') + text_proto = text_format.MessageToString(message) + + # Parse a text proto string. + message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import io +import re + +import six + +if six.PY3: + long = int + +from google.protobuf.internal import type_checkers +from google.protobuf import descriptor +from google.protobuf import text_encoding + +__all__ = ['MessageToString', 'PrintMessage', 'PrintField', + 'PrintFieldValue', 'Merge'] + + +_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), + type_checkers.Int32ValueChecker(), + type_checkers.Uint64ValueChecker(), + type_checkers.Int64ValueChecker()) +_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?', re.IGNORECASE) +_FLOAT_NAN = re.compile('nanf?', re.IGNORECASE) +_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, + descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) +_QUOTES = frozenset(("'", '"')) + + +class Error(Exception): + """Top-level module error for text_format.""" + + +class ParseError(Error): + """Thrown in case of text parsing error.""" + + +class TextWriter(object): + def __init__(self, as_utf8): + if six.PY2: + self._writer = io.BytesIO() + else: + self._writer = io.StringIO() + + def write(self, val): + if six.PY2: + if isinstance(val, six.text_type): + val = val.encode('utf-8') + return self._writer.write(val) + + def close(self): + return self._writer.close() + + def getvalue(self): + return self._writer.getvalue() + + +def MessageToString(message, as_utf8=False, as_one_line=False, + pointy_brackets=False, use_index_order=False, + float_format=None): + """Convert protobuf message to text format. + + Floating point values can be formatted compactly with 15 digits of + precision (which is the most that IEEE 754 "double" can guarantee) + using float_format='.15g'. 
To ensure that converting to text and back to a + proto will result in an identical value, float_format='.17g' should be used. + + Args: + message: The protocol buffers message. + as_utf8: Produce text output in UTF8 format. + as_one_line: Don't introduce newlines between fields. + pointy_brackets: If True, use angle brackets instead of curly braces for + nesting. + use_index_order: If True, print fields of a proto message using the order + defined in source code instead of the field number. By default, use the + field number order. + float_format: If set, use this to specify floating point number formatting + (per the "Format Specification Mini-Language"); otherwise, str() is used. + + Returns: + A string of the text formatted protocol buffer message. + """ + out = TextWriter(as_utf8) + PrintMessage(message, out, as_utf8=as_utf8, as_one_line=as_one_line, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format) + result = out.getvalue() + out.close() + if as_one_line: + return result.rstrip() + return result + + +def _IsMapEntry(field): + return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.message_type.has_options and + field.message_type.GetOptions().map_entry) + + +def PrintMessage(message, out, indent=0, as_utf8=False, as_one_line=False, + pointy_brackets=False, use_index_order=False, + float_format=None): + fields = message.ListFields() + if use_index_order: + fields.sort(key=lambda x: x[0].index) + for field, value in fields: + if _IsMapEntry(field): + for key in sorted(value): + # This is slow for maps with submessage entires because it copies the + # entire tree. Unfortunately this would take significant refactoring + # of this file to work around. + # + # TODO(haberman): refactor and optimize if this becomes an issue. 
+ entry_submsg = field.message_type._concrete_class( + key=key, value=value[key]) + PrintField(field, entry_submsg, out, indent, as_utf8, as_one_line, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, float_format=float_format) + elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + for element in value: + PrintField(field, element, out, indent, as_utf8, as_one_line, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format) + else: + PrintField(field, value, out, indent, as_utf8, as_one_line, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format) + + +def PrintField(field, value, out, indent=0, as_utf8=False, as_one_line=False, + pointy_brackets=False, use_index_order=False, float_format=None): + """Print a single field name/value pair. For repeated fields, the value + should be a single element. + """ + + out.write(' ' * indent) + if field.is_extension: + out.write('[') + if (field.containing_type.GetOptions().message_set_wire_format and + field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and + field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): + out.write(field.message_type.full_name) + else: + out.write(field.full_name) + out.write(']') + elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: + # For groups, use the capitalized name. + out.write(field.message_type.name) + else: + out.write(field.name) + + if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + # The colon is optional in this case, but our cross-language golden files + # don't include it. 
+ out.write(': ') + + PrintFieldValue(field, value, out, indent, as_utf8, as_one_line, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format) + if as_one_line: + out.write(' ') + else: + out.write('\n') + + +def PrintFieldValue(field, value, out, indent=0, as_utf8=False, + as_one_line=False, pointy_brackets=False, + use_index_order=False, + float_format=None): + """Print a single field value (not including name). For repeated fields, + the value should be a single element.""" + + if pointy_brackets: + openb = '<' + closeb = '>' + else: + openb = '{' + closeb = '}' + + if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + if as_one_line: + out.write(' %s ' % openb) + PrintMessage(value, out, indent, as_utf8, as_one_line, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format) + out.write(closeb) + else: + out.write(' %s\n' % openb) + PrintMessage(value, out, indent + 2, as_utf8, as_one_line, + pointy_brackets=pointy_brackets, + use_index_order=use_index_order, + float_format=float_format) + out.write(' ' * indent + closeb) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: + enum_value = field.enum_type.values_by_number.get(value, None) + if enum_value is not None: + out.write(enum_value.name) + else: + out.write(str(value)) + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: + out.write('\"') + if isinstance(value, six.text_type): + out_value = value.encode('utf-8') + else: + out_value = value + if field.type == descriptor.FieldDescriptor.TYPE_BYTES: + # We need to escape non-UTF8 chars in TYPE_BYTES field. 
+ out_as_utf8 = False + else: + out_as_utf8 = as_utf8 + out.write(text_encoding.CEscape(out_value, out_as_utf8)) + out.write('\"') + elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: + if value: + out.write('true') + else: + out.write('false') + elif field.cpp_type in _FLOAT_TYPES and float_format is not None: + out.write('{1:{0}}'.format(float_format, value)) + else: + out.write(str(value)) + + +def Parse(text, message, allow_unknown_extension=False): + """Parses an text representation of a protocol message into a message. + + Args: + text: Message text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + if not isinstance(text, str): + text = text.decode('utf-8') + return ParseLines(text.split('\n'), message, allow_unknown_extension) + + +def Merge(text, message, allow_unknown_extension=False): + """Parses an text representation of a protocol message into a message. + + Like Parse(), but allows repeated values for a non-repeated field, and uses + the last one. + + Args: + text: Message text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + return MergeLines(text.split('\n'), message, allow_unknown_extension) + + +def ParseLines(lines, message, allow_unknown_extension=False): + """Parses an text representation of a protocol message into a message. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + + Returns: + The same message passed as argument. 
+ + Raises: + ParseError: On text parsing problems. + """ + _ParseOrMerge(lines, message, False, allow_unknown_extension) + return message + + +def MergeLines(lines, message, allow_unknown_extension=False): + """Parses an text representation of a protocol message into a message. + + Args: + lines: An iterable of lines of a message's text representation. + message: A protocol buffer message to merge into. + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + + Returns: + The same message passed as argument. + + Raises: + ParseError: On text parsing problems. + """ + _ParseOrMerge(lines, message, True, allow_unknown_extension) + return message + + +def _ParseOrMerge(lines, + message, + allow_multiple_scalars, + allow_unknown_extension=False): + """Converts an text representation of a protocol message into a message. + + Args: + lines: Lines of a message's text representation. + message: A protocol buffer message to merge into. + allow_multiple_scalars: Determines if repeated values for a non-repeated + field are permitted, e.g., the string "foo: 1 foo: 2" for a + required/optional field named "foo". + allow_unknown_extension: if True, skip over missing extensions and keep + parsing + + Raises: + ParseError: On text parsing problems. + """ + tokenizer = _Tokenizer(lines) + while not tokenizer.AtEnd(): + _MergeField(tokenizer, message, allow_multiple_scalars, + allow_unknown_extension) + + +def _MergeField(tokenizer, + message, + allow_multiple_scalars, + allow_unknown_extension=False): + """Merges a single protocol message field into a message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + message: A protocol message to record the data. + allow_multiple_scalars: Determines if repeated values for a non-repeated + field are permitted, e.g., the string "foo: 1 foo: 2" for a + required/optional field named "foo". 
+ allow_unknown_extension: if True, skip over missing extensions and keep + parsing + + Raises: + ParseError: In case of text parsing problems. + """ + message_descriptor = message.DESCRIPTOR + if (hasattr(message_descriptor, 'syntax') and + message_descriptor.syntax == 'proto3'): + # Proto3 doesn't represent presence so we can't test if multiple + # scalars have occurred. We have to allow them. + allow_multiple_scalars = True + if tokenizer.TryConsume('['): + name = [tokenizer.ConsumeIdentifier()] + while tokenizer.TryConsume('.'): + name.append(tokenizer.ConsumeIdentifier()) + name = '.'.join(name) + + if not message_descriptor.is_extendable: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" does not have extensions.' % + message_descriptor.full_name) + # pylint: disable=protected-access + field = message.Extensions._FindExtensionByName(name) + # pylint: enable=protected-access + if not field: + if allow_unknown_extension: + field = None + else: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" not registered.' % name) + elif message_descriptor != field.containing_type: + raise tokenizer.ParseErrorPreviousToken( + 'Extension "%s" does not extend message type "%s".' % ( + name, message_descriptor.full_name)) + + tokenizer.Consume(']') + + else: + name = tokenizer.ConsumeIdentifier() + field = message_descriptor.fields_by_name.get(name, None) + + # Group names are expected to be capitalized as they appear in the + # .proto file, which actually matches their type names, not their field + # names. + if not field: + field = message_descriptor.fields_by_name.get(name.lower(), None) + if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: + field = None + + if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and + field.message_type.name != name): + field = None + + if not field: + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" has no field named "%s".' 
% ( + message_descriptor.full_name, name)) + + if field and field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + is_map_entry = _IsMapEntry(field) + tokenizer.TryConsume(':') + + if tokenizer.TryConsume('<'): + end_token = '>' + else: + tokenizer.Consume('{') + end_token = '}' + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + sub_message = message.Extensions[field].add() + elif is_map_entry: + sub_message = field.message_type._concrete_class() + else: + sub_message = getattr(message, field.name).add() + else: + if field.is_extension: + sub_message = message.Extensions[field] + else: + sub_message = getattr(message, field.name) + sub_message.SetInParent() + + while not tokenizer.TryConsume(end_token): + if tokenizer.AtEnd(): + raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token)) + _MergeField(tokenizer, sub_message, allow_multiple_scalars, + allow_unknown_extension) + + if is_map_entry: + value_cpptype = field.message_type.fields_by_name['value'].cpp_type + if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: + value = getattr(message, field.name)[sub_message.key] + value.MergeFrom(sub_message.value) + else: + getattr(message, field.name)[sub_message.key] = sub_message.value + elif field: + tokenizer.Consume(':') + if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and + tokenizer.TryConsume('[')): + # Short repeated format, e.g. "foo: [1, 2, 3]" + while True: + _MergeScalarField(tokenizer, message, field, allow_multiple_scalars) + if tokenizer.TryConsume(']'): + break + tokenizer.Consume(',') + else: + _MergeScalarField(tokenizer, message, field, allow_multiple_scalars) + else: # Proto field is unknown. + assert allow_unknown_extension + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. 
+ if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + +def _SkipFieldContents(tokenizer): + """Skips over contents (value or message) of a field. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + # Try to guess the type of this field. + # If this field is not a message, there should be a ":" between the + # field name and the field value and also the field value should not + # start with "{" or "<" which indicates the beginning of a message body. + # If there is no ":" or there is a "{" or "<" after ":", this field has + # to be a message or the input is ill-formed. + if tokenizer.TryConsume(':') and not tokenizer.LookingAt( + '{') and not tokenizer.LookingAt('<'): + _SkipFieldValue(tokenizer) + else: + _SkipFieldMessage(tokenizer) + + +def _SkipField(tokenizer): + """Skips over a complete field (name and value/message). + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + if tokenizer.TryConsume('['): + # Consume extension name. + tokenizer.ConsumeIdentifier() + while tokenizer.TryConsume('.'): + tokenizer.ConsumeIdentifier() + tokenizer.Consume(']') + else: + tokenizer.ConsumeIdentifier() + + _SkipFieldContents(tokenizer) + + # For historical reasons, fields may optionally be separated by commas or + # semicolons. + if not tokenizer.TryConsume(','): + tokenizer.TryConsume(';') + + +def _SkipFieldMessage(tokenizer): + """Skips over a field message. + + Args: + tokenizer: A tokenizer to parse the field name and values. + """ + + if tokenizer.TryConsume('<'): + delimiter = '>' + else: + tokenizer.Consume('{') + delimiter = '}' + + while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): + _SkipField(tokenizer) + + tokenizer.Consume(delimiter) + + +def _SkipFieldValue(tokenizer): + """Skips over a field value. + + Args: + tokenizer: A tokenizer to parse the field name and values. + + Raises: + ParseError: In case an invalid field value is found. 
+ """ + # String tokens can come in multiple adjacent string literals. + # If we can consume one, consume as many as we can. + if tokenizer.TryConsumeString(): + while tokenizer.TryConsumeString(): + pass + return + + if (not tokenizer.TryConsumeIdentifier() and + not tokenizer.TryConsumeInt64() and + not tokenizer.TryConsumeUint64() and + not tokenizer.TryConsumeFloat()): + raise ParseError('Invalid field value: ' + tokenizer.token) + + +def _MergeScalarField(tokenizer, message, field, allow_multiple_scalars): + """Merges a single protocol message scalar field into a message. + + Args: + tokenizer: A tokenizer to parse the field value. + message: A protocol message to record the data. + field: The descriptor of the field to be merged. + allow_multiple_scalars: Determines if repeated values for a non-repeated + field are permitted, e.g., the string "foo: 1 foo: 2" for a + required/optional field named "foo". + + Raises: + ParseError: In case of text parsing problems. + RuntimeError: On runtime errors. 
+ """ + value = None + + if field.type in (descriptor.FieldDescriptor.TYPE_INT32, + descriptor.FieldDescriptor.TYPE_SINT32, + descriptor.FieldDescriptor.TYPE_SFIXED32): + value = tokenizer.ConsumeInt32() + elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, + descriptor.FieldDescriptor.TYPE_SINT64, + descriptor.FieldDescriptor.TYPE_SFIXED64): + value = tokenizer.ConsumeInt64() + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, + descriptor.FieldDescriptor.TYPE_FIXED32): + value = tokenizer.ConsumeUint32() + elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, + descriptor.FieldDescriptor.TYPE_FIXED64): + value = tokenizer.ConsumeUint64() + elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, + descriptor.FieldDescriptor.TYPE_DOUBLE): + value = tokenizer.ConsumeFloat() + elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: + value = tokenizer.ConsumeBool() + elif field.type == descriptor.FieldDescriptor.TYPE_STRING: + value = tokenizer.ConsumeString() + elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: + value = tokenizer.ConsumeByteString() + elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: + value = tokenizer.ConsumeEnum(field) + else: + raise RuntimeError('Unknown field type %d' % field.type) + + if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: + if field.is_extension: + message.Extensions[field].append(value) + else: + getattr(message, field.name).append(value) + else: + if field.is_extension: + if not allow_multiple_scalars and message.HasExtension(field): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" extensions.' % + (message.DESCRIPTOR.full_name, field.full_name)) + else: + message.Extensions[field] = value + else: + if not allow_multiple_scalars and message.HasField(field.name): + raise tokenizer.ParseErrorPreviousToken( + 'Message type "%s" should not have multiple "%s" fields.' 
% + (message.DESCRIPTOR.full_name, field.name)) + else: + setattr(message, field.name, value) + + +class _Tokenizer(object): + """Protocol buffer text representation tokenizer. + + This class handles the lower level string parsing by splitting it into + meaningful tokens. + + It was directly ported from the Java protocol buffer API. + """ + + _WHITESPACE = re.compile('(\\s|(#.*$))+', re.MULTILINE) + _TOKEN = re.compile('|'.join([ + r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier + r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number + ] + [ # quoted str for each quote mark + r'{qt}([^{qt}\n\\]|\\.)*({qt}|\\?$)'.format(qt=mark) for mark in _QUOTES + ])) + + _IDENTIFIER = re.compile(r'\w+') + + def __init__(self, lines): + self._position = 0 + self._line = -1 + self._column = 0 + self._token_start = None + self.token = '' + self._lines = iter(lines) + self._current_line = '' + self._previous_line = 0 + self._previous_column = 0 + self._more_lines = True + self._SkipWhitespace() + self.NextToken() + + def LookingAt(self, token): + return self.token == token + + def AtEnd(self): + """Checks the end of the text was reached. + + Returns: + True iff the end was reached. + """ + return not self.token + + def _PopLine(self): + while len(self._current_line) <= self._column: + try: + self._current_line = next(self._lines) + except StopIteration: + self._current_line = '' + self._more_lines = False + return + else: + self._line += 1 + self._column = 0 + + def _SkipWhitespace(self): + while True: + self._PopLine() + match = self._WHITESPACE.match(self._current_line, self._column) + if not match: + break + length = len(match.group(0)) + self._column += length + + def TryConsume(self, token): + """Tries to consume a given piece of text. + + Args: + token: Text to consume. + + Returns: + True iff the text was consumed. + """ + if self.token == token: + self.NextToken() + return True + return False + + def Consume(self, token): + """Consumes a piece of text. 
+ + Args: + token: Text to consume. + + Raises: + ParseError: If the text couldn't be consumed. + """ + if not self.TryConsume(token): + raise self._ParseError('Expected "%s".' % token) + + def TryConsumeIdentifier(self): + try: + self.ConsumeIdentifier() + return True + except ParseError: + return False + + def ConsumeIdentifier(self): + """Consumes protocol message field identifier. + + Returns: + Identifier string. + + Raises: + ParseError: If an identifier couldn't be consumed. + """ + result = self.token + if not self._IDENTIFIER.match(result): + raise self._ParseError('Expected identifier.') + self.NextToken() + return result + + def ConsumeInt32(self): + """Consumes a signed 32bit integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 32bit integer couldn't be consumed. + """ + try: + result = ParseInteger(self.token, is_signed=True, is_long=False) + except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result + + def ConsumeUint32(self): + """Consumes an unsigned 32bit integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 32bit integer couldn't be consumed. + """ + try: + result = ParseInteger(self.token, is_signed=False, is_long=False) + except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeInt64(self): + try: + self.ConsumeInt64() + return True + except ParseError: + return False + + def ConsumeInt64(self): + """Consumes a signed 64bit integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If a signed 64bit integer couldn't be consumed. 
+ """ + try: + result = ParseInteger(self.token, is_signed=True, is_long=True) + except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeUint64(self): + try: + self.ConsumeUint64() + return True + except ParseError: + return False + + def ConsumeUint64(self): + """Consumes an unsigned 64bit integer number. + + Returns: + The integer parsed. + + Raises: + ParseError: If an unsigned 64bit integer couldn't be consumed. + """ + try: + result = ParseInteger(self.token, is_signed=False, is_long=True) + except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeFloat(self): + try: + self.ConsumeFloat() + return True + except ParseError: + return False + + def ConsumeFloat(self): + """Consumes an floating point number. + + Returns: + The number parsed. + + Raises: + ParseError: If a floating point number couldn't be consumed. + """ + try: + result = ParseFloat(self.token) + except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result + + def ConsumeBool(self): + """Consumes a boolean value. + + Returns: + The bool parsed. + + Raises: + ParseError: If a boolean value couldn't be consumed. + """ + try: + result = ParseBool(self.token) + except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result + + def TryConsumeString(self): + try: + self.ConsumeString() + return True + except ParseError: + return False + + def ConsumeString(self): + """Consumes a string value. + + Returns: + The string parsed. + + Raises: + ParseError: If a string value couldn't be consumed. + """ + the_bytes = self.ConsumeByteString() + try: + return six.text_type(the_bytes, 'utf-8') + except UnicodeDecodeError as e: + raise self._StringParseError(e) + + def ConsumeByteString(self): + """Consumes a byte array value. + + Returns: + The array parsed (as a string). + + Raises: + ParseError: If a byte array value couldn't be consumed. 
+ """ + the_list = [self._ConsumeSingleByteString()] + while self.token and self.token[0] in _QUOTES: + the_list.append(self._ConsumeSingleByteString()) + return b''.join(the_list) + + def _ConsumeSingleByteString(self): + """Consume one token of a string literal. + + String literals (whether bytes or text) can come in multiple adjacent + tokens which are automatically concatenated, like in C or Python. This + method only consumes one token. + + Returns: + The token parsed. + Raises: + ParseError: When the wrong format data is found. + """ + text = self.token + if len(text) < 1 or text[0] not in _QUOTES: + raise self._ParseError('Expected string but found: %r' % (text,)) + + if len(text) < 2 or text[-1] != text[0]: + raise self._ParseError('String missing ending quote: %r' % (text,)) + + try: + result = text_encoding.CUnescape(text[1:-1]) + except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result + + def ConsumeEnum(self, field): + try: + result = ParseEnum(field, self.token) + except ValueError as e: + raise self._ParseError(str(e)) + self.NextToken() + return result + + def ParseErrorPreviousToken(self, message): + """Creates and *returns* a ParseError for the previously read token. + + Args: + message: A message to set for the exception. + + Returns: + A ParseError instance. 
+ """ + return ParseError('%d:%d : %s' % ( + self._previous_line + 1, self._previous_column + 1, message)) + + def _ParseError(self, message): + """Creates and *returns* a ParseError for the current token.""" + return ParseError('%d:%d : %s' % ( + self._line + 1, self._column + 1, message)) + + def _StringParseError(self, e): + return self._ParseError('Couldn\'t parse string: ' + str(e)) + + def NextToken(self): + """Reads the next meaningful token.""" + self._previous_line = self._line + self._previous_column = self._column + + self._column += len(self.token) + self._SkipWhitespace() + + if not self._more_lines: + self.token = '' + return + + match = self._TOKEN.match(self._current_line, self._column) + if match: + token = match.group(0) + self.token = token + else: + self.token = self._current_line[self._column] + + +def ParseInteger(text, is_signed=False, is_long=False): + """Parses an integer. + + Args: + text: The text to parse. + is_signed: True if a signed integer must be parsed. + is_long: True if a long integer must be parsed. + + Returns: + The integer value. + + Raises: + ValueError: Thrown Iff the text is not a valid integer. + """ + # Do the actual parsing. Exception handling is propagated to caller. + try: + # We force 32-bit values to int and 64-bit values to long to make + # alternate implementations where the distinction is more significant + # (e.g. the C++ implementation) simpler. + if is_long: + result = long(text, 0) + else: + result = int(text, 0) + except ValueError: + raise ValueError('Couldn\'t parse integer: %s' % text) + + # Check if the integer is sane. Exceptions handled by callers. + checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] + checker.CheckValue(result) + return result + + +def ParseFloat(text): + """Parse a floating point number. + + Args: + text: Text to parse. + + Returns: + The number parsed. + + Raises: + ValueError: If a floating point number couldn't be parsed. 
+ """ + try: + # Assume Python compatible syntax. + return float(text) + except ValueError: + # Check alternative spellings. + if _FLOAT_INFINITY.match(text): + if text[0] == '-': + return float('-inf') + else: + return float('inf') + elif _FLOAT_NAN.match(text): + return float('nan') + else: + # assume '1.0f' format + try: + return float(text.rstrip('f')) + except ValueError: + raise ValueError('Couldn\'t parse float: %s' % text) + + +def ParseBool(text): + """Parse a boolean value. + + Args: + text: Text to parse. + + Returns: + Boolean values parsed + + Raises: + ValueError: If text is not a valid boolean. + """ + if text in ('true', 't', '1'): + return True + elif text in ('false', 'f', '0'): + return False + else: + raise ValueError('Expected "true" or "false".') + + +def ParseEnum(field, value): + """Parse an enum value. + + The value can be specified by a number (the enum value), or by + a string literal (the enum name). + + Args: + field: Enum field descriptor. + value: String value. + + Returns: + Enum value number. + + Raises: + ValueError: If the enum value could not be parsed. + """ + enum_descriptor = field.enum_type + try: + number = int(value, 0) + except ValueError: + # Identifier. + enum_value = enum_descriptor.values_by_name.get(value, None) + if enum_value is None: + raise ValueError( + 'Enum type "%s" has no value named %s.' % ( + enum_descriptor.full_name, value)) + else: + # Numeric value. + enum_value = enum_descriptor.values_by_number.get(number, None) + if enum_value is None: + raise ValueError( + 'Enum type "%s" has no value with number %d.' % ( + enum_descriptor.full_name, number)) + return enum_value.number diff --git a/deps/google/protobuf/timestamp_pb2.py b/deps/google/protobuf/timestamp_pb2.py new file mode 100644 index 00000000..db343a9f --- /dev/null +++ b/deps/google/protobuf/timestamp_pb2.py @@ -0,0 +1,78 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/timestamp.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/timestamp.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42T\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01\xa0\x01\x01\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_TIMESTAMP = _descriptor.Descriptor( + name='Timestamp', + full_name='google.protobuf.Timestamp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='seconds', full_name='google.protobuf.Timestamp.seconds', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nanos', full_name='google.protobuf.Timestamp.nanos', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=52, + serialized_end=95, +) + 
+DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP + +Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), dict( + DESCRIPTOR = _TIMESTAMP, + __module__ = 'google.protobuf.timestamp_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Timestamp) + )) +_sym_db.RegisterMessage(Timestamp) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\016TimestampProtoP\001\240\001\001\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/type_pb2.py b/deps/google/protobuf/type_pb2.py new file mode 100644 index 00000000..737493f6 --- /dev/null +++ b/deps/google/protobuf/type_pb2.py @@ -0,0 +1,541 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/type.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/type.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 
\x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b \x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 
\x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42L\n\x13\x63om.google.protobufB\tTypeProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') + , + dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_source__context__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_SYNTAX = _descriptor.EnumDescriptor( + name='Syntax', + full_name='google.protobuf.Syntax', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SYNTAX_PROTO2', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SYNTAX_PROTO3', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1413, + serialized_end=1459, +) +_sym_db.RegisterEnumDescriptor(_SYNTAX) + +Syntax = enum_type_wrapper.EnumTypeWrapper(_SYNTAX) +SYNTAX_PROTO2 = 0 +SYNTAX_PROTO3 = 1 + + +_FIELD_KIND = _descriptor.EnumDescriptor( + name='Kind', + full_name='google.protobuf.Field.Kind', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_DOUBLE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_FLOAT', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_INT64', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT64', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + 
name='TYPE_INT32', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED64', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_FIXED32', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_BOOL', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_STRING', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_GROUP', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_MESSAGE', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_BYTES', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_UINT32', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_ENUM', index=14, number=14, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED32', index=15, number=15, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_SFIXED64', index=16, number=16, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT32', index=17, number=17, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TYPE_SINT64', index=18, number=18, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=610, + serialized_end=938, +) +_sym_db.RegisterEnumDescriptor(_FIELD_KIND) + +_FIELD_CARDINALITY = _descriptor.EnumDescriptor( + name='Cardinality', + full_name='google.protobuf.Field.Cardinality', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CARDINALITY_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CARDINALITY_OPTIONAL', index=1, number=1, + options=None, + 
type=None), + _descriptor.EnumValueDescriptor( + name='CARDINALITY_REQUIRED', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CARDINALITY_REPEATED', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=940, + serialized_end=1056, +) +_sym_db.RegisterEnumDescriptor(_FIELD_CARDINALITY) + + +_TYPE = _descriptor.Descriptor( + name='Type', + full_name='google.protobuf.Type', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.Type.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='fields', full_name='google.protobuf.Type.fields', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneofs', full_name='google.protobuf.Type.oneofs', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.Type.options', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='source_context', full_name='google.protobuf.Type.source_context', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.Type.syntax', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=113, + serialized_end=328, +) + + +_FIELD = _descriptor.Descriptor( + name='Field', + full_name='google.protobuf.Field', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='kind', full_name='google.protobuf.Field.kind', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cardinality', full_name='google.protobuf.Field.cardinality', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.Field.number', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.Field.name', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type_url', full_name='google.protobuf.Field.type_url', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_index', full_name='google.protobuf.Field.oneof_index', index=5, + number=7, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='packed', full_name='google.protobuf.Field.packed', index=6, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.Field.options', index=7, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='json_name', full_name='google.protobuf.Field.json_name', index=8, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_value', full_name='google.protobuf.Field.default_value', index=9, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + _FIELD_KIND, + _FIELD_CARDINALITY, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=331, + serialized_end=1056, +) + + +_ENUM = _descriptor.Descriptor( + name='Enum', + full_name='google.protobuf.Enum', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.Enum.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='enumvalue', full_name='google.protobuf.Enum.enumvalue', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.Enum.options', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='source_context', full_name='google.protobuf.Enum.source_context', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='syntax', full_name='google.protobuf.Enum.syntax', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1059, + serialized_end=1265, +) + + +_ENUMVALUE = _descriptor.Descriptor( + name='EnumValue', + full_name='google.protobuf.EnumValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.EnumValue.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='number', full_name='google.protobuf.EnumValue.number', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='options', full_name='google.protobuf.EnumValue.options', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1267, + serialized_end=1350, +) + + +_OPTION = _descriptor.Descriptor( + name='Option', + full_name='google.protobuf.Option', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.protobuf.Option.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.Option.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1352, + serialized_end=1411, +) + +_TYPE.fields_by_name['fields'].message_type = _FIELD +_TYPE.fields_by_name['options'].message_type = _OPTION +_TYPE.fields_by_name['source_context'].message_type = google_dot_protobuf_dot_source__context__pb2._SOURCECONTEXT +_TYPE.fields_by_name['syntax'].enum_type = _SYNTAX +_FIELD.fields_by_name['kind'].enum_type = _FIELD_KIND +_FIELD.fields_by_name['cardinality'].enum_type = _FIELD_CARDINALITY +_FIELD.fields_by_name['options'].message_type = _OPTION +_FIELD_KIND.containing_type = _FIELD +_FIELD_CARDINALITY.containing_type = _FIELD +_ENUM.fields_by_name['enumvalue'].message_type = _ENUMVALUE +_ENUM.fields_by_name['options'].message_type = _OPTION +_ENUM.fields_by_name['source_context'].message_type = google_dot_protobuf_dot_source__context__pb2._SOURCECONTEXT +_ENUM.fields_by_name['syntax'].enum_type = _SYNTAX +_ENUMVALUE.fields_by_name['options'].message_type = _OPTION +_OPTION.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY +DESCRIPTOR.message_types_by_name['Type'] = _TYPE +DESCRIPTOR.message_types_by_name['Field'] = _FIELD +DESCRIPTOR.message_types_by_name['Enum'] = _ENUM +DESCRIPTOR.message_types_by_name['EnumValue'] = _ENUMVALUE +DESCRIPTOR.message_types_by_name['Option'] = _OPTION +DESCRIPTOR.enum_types_by_name['Syntax'] = _SYNTAX + +Type = _reflection.GeneratedProtocolMessageType('Type', (_message.Message,), dict( + DESCRIPTOR = _TYPE, + __module__ = 'google.protobuf.type_pb2' + # 
@@protoc_insertion_point(class_scope:google.protobuf.Type) + )) +_sym_db.RegisterMessage(Type) + +Field = _reflection.GeneratedProtocolMessageType('Field', (_message.Message,), dict( + DESCRIPTOR = _FIELD, + __module__ = 'google.protobuf.type_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Field) + )) +_sym_db.RegisterMessage(Field) + +Enum = _reflection.GeneratedProtocolMessageType('Enum', (_message.Message,), dict( + DESCRIPTOR = _ENUM, + __module__ = 'google.protobuf.type_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Enum) + )) +_sym_db.RegisterMessage(Enum) + +EnumValue = _reflection.GeneratedProtocolMessageType('EnumValue', (_message.Message,), dict( + DESCRIPTOR = _ENUMVALUE, + __module__ = 'google.protobuf.type_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.EnumValue) + )) +_sym_db.RegisterMessage(EnumValue) + +Option = _reflection.GeneratedProtocolMessageType('Option', (_message.Message,), dict( + DESCRIPTOR = _OPTION, + __module__ = 'google.protobuf.type_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Option) + )) +_sym_db.RegisterMessage(Option) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\tTypeProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_arena_pb2.py b/deps/google/protobuf/unittest_arena_pb2.py new file mode 100644 index 00000000..f8431903 --- /dev/null +++ b/deps/google/protobuf/unittest_arena_pb2.py @@ -0,0 +1,121 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/unittest_arena.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import unittest_no_arena_import_pb2 as google_dot_protobuf_dot_unittest__no__arena__import__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_arena.proto', + package='proto2_arena_unittest', + syntax='proto2', + serialized_pb=_b('\n$google/protobuf/unittest_arena.proto\x12\x15proto2_arena_unittest\x1a.google/protobuf/unittest_no_arena_import.proto\"\x1a\n\rNestedMessage\x12\t\n\x01\x64\x18\x01 \x01(\x05\"\xb2\x01\n\x0c\x41renaMessage\x12\x45\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32$.proto2_arena_unittest.NestedMessage\x12[\n repeated_import_no_arena_message\x18\x02 \x03(\x0b\x32\x31.proto2_arena_unittest.ImportNoArenaNestedMessageB\x03\xf8\x01\x01') + , + dependencies=[google_dot_protobuf_dot_unittest__no__arena__import__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='proto2_arena_unittest.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='d', full_name='proto2_arena_unittest.NestedMessage.d', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + 
oneofs=[ + ], + serialized_start=111, + serialized_end=137, +) + + +_ARENAMESSAGE = _descriptor.Descriptor( + name='ArenaMessage', + full_name='proto2_arena_unittest.ArenaMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='repeated_nested_message', full_name='proto2_arena_unittest.ArenaMessage.repeated_nested_message', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_import_no_arena_message', full_name='proto2_arena_unittest.ArenaMessage.repeated_import_no_arena_message', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=140, + serialized_end=318, +) + +_ARENAMESSAGE.fields_by_name['repeated_nested_message'].message_type = _NESTEDMESSAGE +_ARENAMESSAGE.fields_by_name['repeated_import_no_arena_message'].message_type = google_dot_protobuf_dot_unittest__no__arena__import__pb2._IMPORTNOARENANESTEDMESSAGE +DESCRIPTOR.message_types_by_name['NestedMessage'] = _NESTEDMESSAGE +DESCRIPTOR.message_types_by_name['ArenaMessage'] = _ARENAMESSAGE + +NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_arena_pb2' + # @@protoc_insertion_point(class_scope:proto2_arena_unittest.NestedMessage) + )) +_sym_db.RegisterMessage(NestedMessage) + +ArenaMessage = _reflection.GeneratedProtocolMessageType('ArenaMessage', (_message.Message,), dict( + 
DESCRIPTOR = _ARENAMESSAGE, + __module__ = 'google.protobuf.unittest_arena_pb2' + # @@protoc_insertion_point(class_scope:proto2_arena_unittest.ArenaMessage) + )) +_sym_db.RegisterMessage(ArenaMessage) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_custom_options_pb2.py b/deps/google/protobuf/unittest_custom_options_pb2.py new file mode 100644 index 00000000..2d062316 --- /dev/null +++ b/deps/google/protobuf/unittest_custom_options_pb2.py @@ -0,0 +1,1837 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/unittest_custom_options.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import service as _service +from google.protobuf import service_reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_custom_options.proto', + package='protobuf_unittest', + syntax='proto2', + serialized_pb=_b('\n-google/protobuf/unittest_custom_options.proto\x12\x11protobuf_unittest\x1a google/protobuf/descriptor.proto\"\x8d\x01\n\x1cTestMessageWithCustomOptions\x12\x1e\n\x06\x66ield1\x18\x01 
\x01(\tB\x0e\x08\x01\xc1\xe0\xc3\x1d-\xe1u\n\x02\x00\x00\x00\";\n\x06\x41nEnum\x12\x0f\n\x0b\x41NENUM_VAL1\x10\x01\x12\x16\n\x0b\x41NENUM_VAL2\x10\x02\x1a\x05\xb0\x86\xfa\x05{\x1a\x08\xc5\xf6\xc9\x1d\xeb\xfc\xff\xff:\x10\x08\x00\xe0\xe9\xc2\x1d\xc8\xff\xff\xff\xff\xff\xff\xff\xff\x01\"\x18\n\x16\x43ustomOptionFooRequest\"\x19\n\x17\x43ustomOptionFooResponse\"\x1e\n\x1c\x43ustomOptionFooClientMessage\"\x1e\n\x1c\x43ustomOptionFooServerMessage\"m\n\x1a\x44ummyMessageContainingEnum\"O\n\x0cTestEnumType\x12\x1a\n\x16TEST_OPTION_ENUM_TYPE1\x10\x16\x12#\n\x16TEST_OPTION_ENUM_TYPE2\x10\xe9\xff\xff\xff\xff\xff\xff\xff\xff\x01\"!\n\x1f\x44ummyMessageInvalidAsOptionType\"\x8a\x01\n\x1c\x43ustomOptionMinIntegerValues:j\xd0\xde\xb2\x1d\x00\xe8\xc6\xb2\x1d\x80\x80\x80\x80\xf8\xff\xff\xff\xff\x01\xb0\xbc\xb2\x1d\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01\x80\x93\xb2\x1d\x00\xf8\xf5\xb0\x1d\x00\x80\xc4\xb0\x1d\xff\xff\xff\xff\x0f\xf8\x97\xb0\x1d\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x9d\xf5\xaf\x1d\x00\x00\x00\x00\x91\xee\xaf\x1d\x00\x00\x00\x00\x00\x00\x00\x00\xad\x8d\xaf\x1d\x00\x00\x00\x80\x99\xd6\xa8\x1d\x00\x00\x00\x00\x00\x00\x00\x80\"\x91\x01\n\x1c\x43ustomOptionMaxIntegerValues:q\xd0\xde\xb2\x1d\x01\xe8\xc6\xb2\x1d\xff\xff\xff\xff\x07\xb0\xbc\xb2\x1d\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x80\x93\xb2\x1d\xff\xff\xff\xff\x0f\xf8\xf5\xb0\x1d\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x80\xc4\xb0\x1d\xfe\xff\xff\xff\x0f\xf8\x97\xb0\x1d\xfe\xff\xff\xff\xff\xff\xff\xff\xff\x01\x9d\xf5\xaf\x1d\xff\xff\xff\xff\x91\xee\xaf\x1d\xff\xff\xff\xff\xff\xff\xff\xff\xad\x8d\xaf\x1d\xff\xff\xff\x7f\x99\xd6\xa8\x1d\xff\xff\xff\xff\xff\xff\xff\x7f\"n\n\x17\x43ustomOptionOtherValues:S\xe8\xc6\xb2\x1d\x9c\xff\xff\xff\xff\xff\xff\xff\xff\x01\xf5\xdf\xa3\x1d\xe7\x87\x45\x41\xe9\xdc\xa2\x1d\xfbY\x8c\x42\xca\xc0\xf3?\xaa\xdc\xa2\x1d\x0eHello, 
\"World\"\xb2\xd9\xa2\x1d\x0bHello\x00World\x88\xd9\xa2\x1d\xe9\xff\xff\xff\xff\xff\xff\xff\xff\x01\"4\n\x1cSettingRealsFromPositiveInts:\x14\xf5\xdf\xa3\x1d\x00\x00@A\xe9\xdc\xa2\x1d\x00\x00\x00\x00\x00@c@\"4\n\x1cSettingRealsFromNegativeInts:\x14\xf5\xdf\xa3\x1d\x00\x00@\xc1\xe9\xdc\xa2\x1d\x00\x00\x00\x00\x00@c\xc0\"U\n\x12\x43omplexOptionType1\x12\x0b\n\x03\x66oo\x18\x01 \x01(\x05\x12\x0c\n\x04\x66oo2\x18\x02 \x01(\x05\x12\x0c\n\x04\x66oo3\x18\x03 \x01(\x05\x12\x0c\n\x04\x66oo4\x18\x04 \x03(\x05*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"\x8b\x03\n\x12\x43omplexOptionType2\x12\x32\n\x03\x62\x61r\x18\x01 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType1\x12\x0b\n\x03\x62\x61z\x18\x02 \x01(\x05\x12\x46\n\x04\x66red\x18\x03 \x01(\x0b\x32\x38.protobuf_unittest.ComplexOptionType2.ComplexOptionType4\x12H\n\x06\x62\x61rney\x18\x04 \x03(\x0b\x32\x38.protobuf_unittest.ComplexOptionType2.ComplexOptionType4\x1a\x97\x01\n\x12\x43omplexOptionType4\x12\r\n\x05waldo\x18\x01 \x01(\x05\x32r\n\x0c\x63omplex_opt4\x12\x1f.google.protobuf.MessageOptions\x18\x8a\xf5\xd1\x03 \x01(\x0b\x32\x38.protobuf_unittest.ComplexOptionType2.ComplexOptionType4*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"\x9c\x01\n\x12\x43omplexOptionType3\x12\x0b\n\x03qux\x18\x01 \x01(\x05\x12T\n\x12\x63omplexoptiontype5\x18\x02 \x01(\n28.protobuf_unittest.ComplexOptionType3.ComplexOptionType5\x1a#\n\x12\x43omplexOptionType5\x12\r\n\x05plugh\x18\x03 \x01(\x05\"\x1f\n\x0b\x43omplexOpt6\x12\x10\n\x05xyzzy\x18\xdf\xbf\xcf\x03 \x01(\x05\"\xf1\x01\n\x15VariousComplexOptions:\xd7\x01\xa2\xe2\x95\x1d\x02\x08*\xa2\xe2\x95\x1d\x06\xd8\x85\x9e\x1d\xc4\x02\xa2\xe2\x95\x1d\x08\x92\xf5\x9d\x1d\x03\x08\xec\x06\xa2\xe2\x95\x1d\x02 c\xa2\xe2\x95\x1d\x02 
X\xaa\xfd\x90\x1d\x03\x10\xdb\x07\xaa\xfd\x90\x1d\x06\xf8\xe6\x97\x1d\x8e\x05\xaa\xfd\x90\x1d\x05\n\x03\x08\xe7\x05\xaa\xfd\x90\x1d\x08\n\x06\xd8\x85\x9e\x1d\xcf\x0f\xaa\xfd\x90\x1d\n\n\x08\x92\xf5\x9d\x1d\x03\x08\xd8\x0f\xaa\xfd\x90\x1d\x08\xc2\xac\x97\x1d\x03\x08\xe5\x05\xaa\xfd\x90\x1d\x0b\xc2\xac\x97\x1d\x06\xd8\x85\x9e\x1d\xce\x0f\xaa\xfd\x90\x1d\r\xc2\xac\x97\x1d\x08\x92\xf5\x9d\x1d\x03\x08\xc9\x10\xd2\xa8\x8f\x1d\x03\x08\xb3\x0f\xaa\xfd\x90\x1d\x05\x1a\x03\x08\xc1\x02\xaa\xfd\x90\x1d\x04\"\x02\x08\x65\xaa\xfd\x90\x1d\x05\"\x03\x08\xd4\x01\xfa\xde\x90\x1d\x02\x08\t\xfa\xde\x90\x1d\x04\x13\x18\x16\x14\xe3\xdc\xfc\x1c\xf8\xfd\xfb\x1c\x18\xe4\xdc\xfc\x1c\"#\n\x13\x41ggregateMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa0\x01\n\x1a\x41ggregateMessageSetElement\x12\t\n\x01s\x18\x01 \x01(\t2w\n\x15message_set_extension\x12&.protobuf_unittest.AggregateMessageSet\x18\xf6\xeb\xae\x07 \x01(\x0b\x32-.protobuf_unittest.AggregateMessageSetElement\"\xfd\x01\n\tAggregate\x12\t\n\x01i\x18\x01 \x01(\x05\x12\t\n\x01s\x18\x02 \x01(\t\x12)\n\x03sub\x18\x03 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate\x12*\n\x04\x66ile\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x34\n\x04mset\x18\x05 \x01(\x0b\x32&.protobuf_unittest.AggregateMessageSet2M\n\x06nested\x12\x1c.google.protobuf.FileOptions\x18\xa7\xd1\xb0\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate\"Y\n\x10\x41ggregateMessage\x12)\n\tfieldname\x18\x01 \x01(\x05\x42\x16\xf2\xa1\x87;\x11\x12\x0f\x46ieldAnnotation:\x1a\xc2\xd1\x86;\x15\x08\x65\x12\x11MessageAnnotation\"\xc9\x01\n\x10NestedOptionType\x1a;\n\rNestedMessage\x12\"\n\x0cnested_field\x18\x01 \x01(\x05\x42\x0c\xc1\xe0\xc3\x1d\xea\x03\x00\x00\x00\x00\x00\x00:\x06\xe0\xe9\xc2\x1d\xe9\x07\"5\n\nNestedEnum\x12\x1d\n\x11NESTED_ENUM_VALUE\x10\x01\x1a\x06\xb0\x86\xfa\x05\xec\x07\x1a\x08\xc5\xf6\xc9\x1d\xeb\x03\x00\x00\x32\x41\n\x10nested_extension\x12\x1c.google.protobuf.FileOptions\x18\xfd\xf8\xe2\x03 
\x01(\x05\x42\x06\xc8\x8b\xca\x1d\xed\x07\"d\n\rOldOptionType\x12\x38\n\x05value\x18\x01 \x02(\x0e\x32).protobuf_unittest.OldOptionType.TestEnum\"\x19\n\x08TestEnum\x12\r\n\tOLD_VALUE\x10\x00\"s\n\rNewOptionType\x12\x38\n\x05value\x18\x01 \x02(\x0e\x32).protobuf_unittest.NewOptionType.TestEnum\"(\n\x08TestEnum\x12\r\n\tOLD_VALUE\x10\x00\x12\r\n\tNEW_VALUE\x10\x01\"-\n!TestMessageWithRequiredEnumOption:\x08\xfa\xe8\xfc\x94\x03\x02\x08\x00*6\n\nMethodOpt1\x12\x13\n\x0fMETHODOPT1_VAL1\x10\x01\x12\x13\n\x0fMETHODOPT1_VAL2\x10\x02*M\n\rAggregateEnum\x12%\n\x05VALUE\x10\x01\x1a\x1a\xca\xfc\x89;\x15\x12\x13\x45numValueAnnotation\x1a\x15\x92\x95\x88;\x10\x12\x0e\x45numAnnotation2\x8e\x01\n\x1cTestServiceWithCustomOptions\x12\x63\n\x03\x46oo\x12).protobuf_unittest.CustomOptionFooRequest\x1a*.protobuf_unittest.CustomOptionFooResponse\"\x05\xe0\xfa\x8c\x1e\x02\x1a\t\x90\xb2\x8b\x1e\xd3\xdb\x80\xcbI2\x99\x01\n\x10\x41ggregateService\x12k\n\x06Method\x12#.protobuf_unittest.AggregateMessage\x1a#.protobuf_unittest.AggregateMessage\"\x17\xca\xc8\x96;\x12\x12\x10MethodAnnotation\x1a\x18\xca\xfb\x8e;\x13\x12\x11ServiceAnnotation:2\n\tfile_opt1\x12\x1c.google.protobuf.FileOptions\x18\x8e\x9d\xd8\x03 \x01(\x04:8\n\x0cmessage_opt1\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xad\xd8\x03 \x01(\x05:4\n\nfield_opt1\x12\x1d.google.protobuf.FieldOptions\x18\x88\xbc\xd8\x03 \x01(\x06:8\n\nfield_opt2\x12\x1d.google.protobuf.FieldOptions\x18\xb9\xa1\xd9\x03 \x01(\x05:\x02\x34\x32:2\n\tenum_opt1\x12\x1c.google.protobuf.EnumOptions\x18\xe8\x9e\xd9\x03 \x01(\x0f:<\n\x0f\x65num_value_opt1\x12!.google.protobuf.EnumValueOptions\x18\xe6\xa0_ \x01(\x05:8\n\x0cservice_opt1\x12\x1f.google.protobuf.ServiceOptions\x18\xa2\xb6\xe1\x03 \x01(\x12:U\n\x0bmethod_opt1\x12\x1e.google.protobuf.MethodOptions\x18\xac\xcf\xe1\x03 \x01(\x0e\x32\x1d.protobuf_unittest.MethodOpt1:4\n\x08\x62ool_opt\x12\x1f.google.protobuf.MessageOptions\x18\xea\xab\xd6\x03 
\x01(\x08:5\n\tint32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xed\xa8\xd6\x03 \x01(\x05:5\n\tint64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xc6\xa7\xd6\x03 \x01(\x03:6\n\nuint32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xb0\xa2\xd6\x03 \x01(\r:6\n\nuint64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xdf\x8e\xd6\x03 \x01(\x04:6\n\nsint32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x88\xd6\x03 \x01(\x11:6\n\nsint64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xff\x82\xd6\x03 \x01(\x12:7\n\x0b\x66ixed32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xd3\xfe\xd5\x03 \x01(\x07:7\n\x0b\x66ixed64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xe2\xfd\xd5\x03 \x01(\x06:8\n\x0csfixed32_opt\x12\x1f.google.protobuf.MessageOptions\x18\xd5\xf1\xd5\x03 \x01(\x0f:8\n\x0csfixed64_opt\x12\x1f.google.protobuf.MessageOptions\x18\xe3\x8a\xd5\x03 \x01(\x10:5\n\tfloat_opt\x12\x1f.google.protobuf.MessageOptions\x18\xfe\xbb\xd4\x03 \x01(\x02:6\n\ndouble_opt\x12\x1f.google.protobuf.MessageOptions\x18\xcd\xab\xd4\x03 \x01(\x01:6\n\nstring_opt\x12\x1f.google.protobuf.MessageOptions\x18\xc5\xab\xd4\x03 \x01(\t:5\n\tbytes_opt\x12\x1f.google.protobuf.MessageOptions\x18\x96\xab\xd4\x03 \x01(\x0c:p\n\x08\x65num_opt\x12\x1f.google.protobuf.MessageOptions\x18\x91\xab\xd4\x03 \x01(\x0e\x32:.protobuf_unittest.DummyMessageContainingEnum.TestEnumType:p\n\x10message_type_opt\x12\x1f.google.protobuf.MessageOptions\x18\xaf\xf2\xd3\x03 \x01(\x0b\x32\x32.protobuf_unittest.DummyMessageInvalidAsOptionType:6\n\x04quux\x12%.protobuf_unittest.ComplexOptionType1\x18\xdb\xe0\xd3\x03 \x01(\x05:^\n\x05\x63orge\x12%.protobuf_unittest.ComplexOptionType1\x18\xd2\xde\xd3\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType3:8\n\x06grault\x12%.protobuf_unittest.ComplexOptionType2\x18\xef\xfc\xd2\x03 \x01(\x05:_\n\x06garply\x12%.protobuf_unittest.ComplexOptionType2\x18\xc8\xf5\xd2\x03 
\x01(\x0b\x32%.protobuf_unittest.ComplexOptionType1:_\n\x0c\x63omplex_opt1\x12\x1f.google.protobuf.MessageOptions\x18\xa4\xdc\xd2\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType1:_\n\x0c\x63omplex_opt2\x12\x1f.google.protobuf.MessageOptions\x18\xd5\x8f\xd2\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType2:_\n\x0c\x63omplex_opt3\x12\x1f.google.protobuf.MessageOptions\x18\xef\x8b\xd2\x03 \x01(\x0b\x32%.protobuf_unittest.ComplexOptionType3:W\n\x0b\x63omplexopt6\x12\x1f.google.protobuf.MessageOptions\x18\xcc\xcb\xcf\x03 \x01(\n2\x1e.protobuf_unittest.ComplexOpt6:N\n\x07\x66ileopt\x12\x1c.google.protobuf.FileOptions\x18\xcf\xdd\xb0\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:P\n\x06msgopt\x12\x1f.google.protobuf.MessageOptions\x18\x98\xea\xb0\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:P\n\x08\x66ieldopt\x12\x1d.google.protobuf.FieldOptions\x18\x9e\xf4\xb0\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:N\n\x07\x65numopt\x12\x1c.google.protobuf.EnumOptions\x18\xd2\x82\xb1\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:V\n\nenumvalopt\x12!.google.protobuf.EnumValueOptions\x18\xc9\x9f\xb1\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:T\n\nserviceopt\x12\x1f.google.protobuf.ServiceOptions\x18\xb9\xef\xb1\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:R\n\tmethodopt\x12\x1e.google.protobuf.MethodOptions\x18\x89\xe9\xb2\x07 \x01(\x0b\x32\x1c.protobuf_unittest.Aggregate:_\n\x11required_enum_opt\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xcd\xcf\x32 \x01(\x0b\x32 .protobuf_unittest.OldOptionTypeB\x87\x01\x80\x01\x01\x88\x01\x01\x90\x01\x01\xf0\xe8\xc1\x1d\xea\xad\xc0\xe5$\xfa\xec\x85;p\x08\x64\x12\x0e\x46ileAnnotation\x1a\x16\x12\x14NestedFileAnnotation\"\x1e\xfa\xec\x85;\x19\x12\x17\x46ileExtensionAnnotation*$\x0b\x10\xf6\xeb\xae\x07\x1a\x1b\n\x19\x45mbeddedMessageSetElement\x0c') + , + dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_METHODOPT1 = 
_descriptor.EnumDescriptor( + name='MethodOpt1', + full_name='protobuf_unittest.MethodOpt1', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='METHODOPT1_VAL1', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='METHODOPT1_VAL2', index=1, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2956, + serialized_end=3010, +) +_sym_db.RegisterEnumDescriptor(_METHODOPT1) + +MethodOpt1 = enum_type_wrapper.EnumTypeWrapper(_METHODOPT1) +_AGGREGATEENUM = _descriptor.EnumDescriptor( + name='AggregateEnum', + full_name='protobuf_unittest.AggregateEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VALUE', index=0, number=1, + options=_descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\312\374\211;\025\022\023EnumValueAnnotation')), + type=None), + ], + containing_type=None, + options=_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\222\225\210;\020\022\016EnumAnnotation')), + serialized_start=3012, + serialized_end=3089, +) +_sym_db.RegisterEnumDescriptor(_AGGREGATEENUM) + +AggregateEnum = enum_type_wrapper.EnumTypeWrapper(_AGGREGATEENUM) +METHODOPT1_VAL1 = 1 +METHODOPT1_VAL2 = 2 +VALUE = 1 + +FILE_OPT1_FIELD_NUMBER = 7736974 +file_opt1 = _descriptor.FieldDescriptor( + name='file_opt1', full_name='protobuf_unittest.file_opt1', index=0, + number=7736974, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +MESSAGE_OPT1_FIELD_NUMBER = 7739036 +message_opt1 = _descriptor.FieldDescriptor( + name='message_opt1', full_name='protobuf_unittest.message_opt1', index=1, + number=7739036, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, 
extension_scope=None, + options=None) +FIELD_OPT1_FIELD_NUMBER = 7740936 +field_opt1 = _descriptor.FieldDescriptor( + name='field_opt1', full_name='protobuf_unittest.field_opt1', index=2, + number=7740936, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +FIELD_OPT2_FIELD_NUMBER = 7753913 +field_opt2 = _descriptor.FieldDescriptor( + name='field_opt2', full_name='protobuf_unittest.field_opt2', index=3, + number=7753913, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=42, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +ENUM_OPT1_FIELD_NUMBER = 7753576 +enum_opt1 = _descriptor.FieldDescriptor( + name='enum_opt1', full_name='protobuf_unittest.enum_opt1', index=4, + number=7753576, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +ENUM_VALUE_OPT1_FIELD_NUMBER = 1560678 +enum_value_opt1 = _descriptor.FieldDescriptor( + name='enum_value_opt1', full_name='protobuf_unittest.enum_value_opt1', index=5, + number=1560678, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +SERVICE_OPT1_FIELD_NUMBER = 7887650 +service_opt1 = _descriptor.FieldDescriptor( + name='service_opt1', full_name='protobuf_unittest.service_opt1', index=6, + number=7887650, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +METHOD_OPT1_FIELD_NUMBER = 7890860 +method_opt1 = _descriptor.FieldDescriptor( + name='method_opt1', 
full_name='protobuf_unittest.method_opt1', index=7, + number=7890860, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +BOOL_OPT_FIELD_NUMBER = 7706090 +bool_opt = _descriptor.FieldDescriptor( + name='bool_opt', full_name='protobuf_unittest.bool_opt', index=8, + number=7706090, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +INT32_OPT_FIELD_NUMBER = 7705709 +int32_opt = _descriptor.FieldDescriptor( + name='int32_opt', full_name='protobuf_unittest.int32_opt', index=9, + number=7705709, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +INT64_OPT_FIELD_NUMBER = 7705542 +int64_opt = _descriptor.FieldDescriptor( + name='int64_opt', full_name='protobuf_unittest.int64_opt', index=10, + number=7705542, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +UINT32_OPT_FIELD_NUMBER = 7704880 +uint32_opt = _descriptor.FieldDescriptor( + name='uint32_opt', full_name='protobuf_unittest.uint32_opt', index=11, + number=7704880, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +UINT64_OPT_FIELD_NUMBER = 7702367 +uint64_opt = _descriptor.FieldDescriptor( + name='uint64_opt', full_name='protobuf_unittest.uint64_opt', index=12, + number=7702367, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=True, extension_scope=None, + options=None) +SINT32_OPT_FIELD_NUMBER = 7701568 +sint32_opt = _descriptor.FieldDescriptor( + name='sint32_opt', full_name='protobuf_unittest.sint32_opt', index=13, + number=7701568, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +SINT64_OPT_FIELD_NUMBER = 7700863 +sint64_opt = _descriptor.FieldDescriptor( + name='sint64_opt', full_name='protobuf_unittest.sint64_opt', index=14, + number=7700863, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +FIXED32_OPT_FIELD_NUMBER = 7700307 +fixed32_opt = _descriptor.FieldDescriptor( + name='fixed32_opt', full_name='protobuf_unittest.fixed32_opt', index=15, + number=7700307, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +FIXED64_OPT_FIELD_NUMBER = 7700194 +fixed64_opt = _descriptor.FieldDescriptor( + name='fixed64_opt', full_name='protobuf_unittest.fixed64_opt', index=16, + number=7700194, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +SFIXED32_OPT_FIELD_NUMBER = 7698645 +sfixed32_opt = _descriptor.FieldDescriptor( + name='sfixed32_opt', full_name='protobuf_unittest.sfixed32_opt', index=17, + number=7698645, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +SFIXED64_OPT_FIELD_NUMBER = 7685475 +sfixed64_opt = _descriptor.FieldDescriptor( + name='sfixed64_opt', 
full_name='protobuf_unittest.sfixed64_opt', index=18, + number=7685475, type=16, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +FLOAT_OPT_FIELD_NUMBER = 7675390 +float_opt = _descriptor.FieldDescriptor( + name='float_opt', full_name='protobuf_unittest.float_opt', index=19, + number=7675390, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DOUBLE_OPT_FIELD_NUMBER = 7673293 +double_opt = _descriptor.FieldDescriptor( + name='double_opt', full_name='protobuf_unittest.double_opt', index=20, + number=7673293, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +STRING_OPT_FIELD_NUMBER = 7673285 +string_opt = _descriptor.FieldDescriptor( + name='string_opt', full_name='protobuf_unittest.string_opt', index=21, + number=7673285, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +BYTES_OPT_FIELD_NUMBER = 7673238 +bytes_opt = _descriptor.FieldDescriptor( + name='bytes_opt', full_name='protobuf_unittest.bytes_opt', index=22, + number=7673238, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +ENUM_OPT_FIELD_NUMBER = 7673233 +enum_opt = _descriptor.FieldDescriptor( + name='enum_opt', full_name='protobuf_unittest.enum_opt', index=23, + number=7673233, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=22, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=True, extension_scope=None, + options=None) +MESSAGE_TYPE_OPT_FIELD_NUMBER = 7665967 +message_type_opt = _descriptor.FieldDescriptor( + name='message_type_opt', full_name='protobuf_unittest.message_type_opt', index=24, + number=7665967, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +QUUX_FIELD_NUMBER = 7663707 +quux = _descriptor.FieldDescriptor( + name='quux', full_name='protobuf_unittest.quux', index=25, + number=7663707, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +CORGE_FIELD_NUMBER = 7663442 +corge = _descriptor.FieldDescriptor( + name='corge', full_name='protobuf_unittest.corge', index=26, + number=7663442, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +GRAULT_FIELD_NUMBER = 7650927 +grault = _descriptor.FieldDescriptor( + name='grault', full_name='protobuf_unittest.grault', index=27, + number=7650927, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +GARPLY_FIELD_NUMBER = 7649992 +garply = _descriptor.FieldDescriptor( + name='garply', full_name='protobuf_unittest.garply', index=28, + number=7649992, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +COMPLEX_OPT1_FIELD_NUMBER = 7646756 +complex_opt1 = _descriptor.FieldDescriptor( + name='complex_opt1', full_name='protobuf_unittest.complex_opt1', 
index=29, + number=7646756, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +COMPLEX_OPT2_FIELD_NUMBER = 7636949 +complex_opt2 = _descriptor.FieldDescriptor( + name='complex_opt2', full_name='protobuf_unittest.complex_opt2', index=30, + number=7636949, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +COMPLEX_OPT3_FIELD_NUMBER = 7636463 +complex_opt3 = _descriptor.FieldDescriptor( + name='complex_opt3', full_name='protobuf_unittest.complex_opt3', index=31, + number=7636463, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +COMPLEXOPT6_FIELD_NUMBER = 7595468 +complexopt6 = _descriptor.FieldDescriptor( + name='complexopt6', full_name='protobuf_unittest.complexopt6', index=32, + number=7595468, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +FILEOPT_FIELD_NUMBER = 15478479 +fileopt = _descriptor.FieldDescriptor( + name='fileopt', full_name='protobuf_unittest.fileopt', index=33, + number=15478479, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +MSGOPT_FIELD_NUMBER = 15480088 +msgopt = _descriptor.FieldDescriptor( + name='msgopt', full_name='protobuf_unittest.msgopt', index=34, + number=15480088, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=True, extension_scope=None, + options=None) +FIELDOPT_FIELD_NUMBER = 15481374 +fieldopt = _descriptor.FieldDescriptor( + name='fieldopt', full_name='protobuf_unittest.fieldopt', index=35, + number=15481374, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +ENUMOPT_FIELD_NUMBER = 15483218 +enumopt = _descriptor.FieldDescriptor( + name='enumopt', full_name='protobuf_unittest.enumopt', index=36, + number=15483218, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +ENUMVALOPT_FIELD_NUMBER = 15486921 +enumvalopt = _descriptor.FieldDescriptor( + name='enumvalopt', full_name='protobuf_unittest.enumvalopt', index=37, + number=15486921, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +SERVICEOPT_FIELD_NUMBER = 15497145 +serviceopt = _descriptor.FieldDescriptor( + name='serviceopt', full_name='protobuf_unittest.serviceopt', index=38, + number=15497145, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +METHODOPT_FIELD_NUMBER = 15512713 +methodopt = _descriptor.FieldDescriptor( + name='methodopt', full_name='protobuf_unittest.methodopt', index=39, + number=15512713, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REQUIRED_ENUM_OPT_FIELD_NUMBER = 106161807 +required_enum_opt = _descriptor.FieldDescriptor( + name='required_enum_opt', 
full_name='protobuf_unittest.required_enum_opt', index=40, + number=106161807, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + +_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM = _descriptor.EnumDescriptor( + name='AnEnum', + full_name='protobuf_unittest.TestMessageWithCustomOptions.AnEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ANENUM_VAL1', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ANENUM_VAL2', index=1, number=2, + options=_descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\260\206\372\005{')), + type=None), + ], + containing_type=None, + options=_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\305\366\311\035\353\374\377\377')), + serialized_start=167, + serialized_end=226, +) +_sym_db.RegisterEnumDescriptor(_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM) + +_DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE = _descriptor.EnumDescriptor( + name='TestEnumType', + full_name='protobuf_unittest.DummyMessageContainingEnum.TestEnumType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TEST_OPTION_ENUM_TYPE1', index=0, number=22, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TEST_OPTION_ENUM_TYPE2', index=1, number=-23, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=393, + serialized_end=472, +) +_sym_db.RegisterEnumDescriptor(_DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE) + +_NESTEDOPTIONTYPE_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='protobuf_unittest.NestedOptionType.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NESTED_ENUM_VALUE', index=0, number=1, + options=_descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), 
_b('\260\206\372\005\354\007')), + type=None), + ], + containing_type=None, + options=_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\305\366\311\035\353\003\000\000')), + serialized_start=2568, + serialized_end=2621, +) +_sym_db.RegisterEnumDescriptor(_NESTEDOPTIONTYPE_NESTEDENUM) + +_OLDOPTIONTYPE_TESTENUM = _descriptor.EnumDescriptor( + name='TestEnum', + full_name='protobuf_unittest.OldOptionType.TestEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='OLD_VALUE', index=0, number=0, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2765, + serialized_end=2790, +) +_sym_db.RegisterEnumDescriptor(_OLDOPTIONTYPE_TESTENUM) + +_NEWOPTIONTYPE_TESTENUM = _descriptor.EnumDescriptor( + name='TestEnum', + full_name='protobuf_unittest.NewOptionType.TestEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='OLD_VALUE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEW_VALUE', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2867, + serialized_end=2907, +) +_sym_db.RegisterEnumDescriptor(_NEWOPTIONTYPE_TESTENUM) + + +_TESTMESSAGEWITHCUSTOMOPTIONS = _descriptor.Descriptor( + name='TestMessageWithCustomOptions', + full_name='protobuf_unittest.TestMessageWithCustomOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field1', full_name='protobuf_unittest.TestMessageWithCustomOptions.field1', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001\301\340\303\035-\341u\n\002\000\000\000'))), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + _TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM, + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\000\340\351\302\035\310\377\377\377\377\377\377\377\377\001')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=103, + serialized_end=244, +) + + +_CUSTOMOPTIONFOOREQUEST = _descriptor.Descriptor( + name='CustomOptionFooRequest', + full_name='protobuf_unittest.CustomOptionFooRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=246, + serialized_end=270, +) + + +_CUSTOMOPTIONFOORESPONSE = _descriptor.Descriptor( + name='CustomOptionFooResponse', + full_name='protobuf_unittest.CustomOptionFooResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=272, + serialized_end=297, +) + + +_CUSTOMOPTIONFOOCLIENTMESSAGE = _descriptor.Descriptor( + name='CustomOptionFooClientMessage', + full_name='protobuf_unittest.CustomOptionFooClientMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=299, + serialized_end=329, +) + + +_CUSTOMOPTIONFOOSERVERMESSAGE = _descriptor.Descriptor( + name='CustomOptionFooServerMessage', + full_name='protobuf_unittest.CustomOptionFooServerMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', 
+ extension_ranges=[], + oneofs=[ + ], + serialized_start=331, + serialized_end=361, +) + + +_DUMMYMESSAGECONTAININGENUM = _descriptor.Descriptor( + name='DummyMessageContainingEnum', + full_name='protobuf_unittest.DummyMessageContainingEnum', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=363, + serialized_end=472, +) + + +_DUMMYMESSAGEINVALIDASOPTIONTYPE = _descriptor.Descriptor( + name='DummyMessageInvalidAsOptionType', + full_name='protobuf_unittest.DummyMessageInvalidAsOptionType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=474, + serialized_end=507, +) + + +_CUSTOMOPTIONMININTEGERVALUES = _descriptor.Descriptor( + name='CustomOptionMinIntegerValues', + full_name='protobuf_unittest.CustomOptionMinIntegerValues', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\320\336\262\035\000\350\306\262\035\200\200\200\200\370\377\377\377\377\001\260\274\262\035\200\200\200\200\200\200\200\200\200\001\200\223\262\035\000\370\365\260\035\000\200\304\260\035\377\377\377\377\017\370\227\260\035\377\377\377\377\377\377\377\377\377\001\235\365\257\035\000\000\000\000\221\356\257\035\000\000\000\000\000\000\000\000\255\215\257\035\000\000\000\200\231\326\250\035\000\000\000\000\000\000\000\200')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=510, + serialized_end=648, +) + + +_CUSTOMOPTIONMAXINTEGERVALUES = 
_descriptor.Descriptor( + name='CustomOptionMaxIntegerValues', + full_name='protobuf_unittest.CustomOptionMaxIntegerValues', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\320\336\262\035\001\350\306\262\035\377\377\377\377\007\260\274\262\035\377\377\377\377\377\377\377\377\177\200\223\262\035\377\377\377\377\017\370\365\260\035\377\377\377\377\377\377\377\377\377\001\200\304\260\035\376\377\377\377\017\370\227\260\035\376\377\377\377\377\377\377\377\377\001\235\365\257\035\377\377\377\377\221\356\257\035\377\377\377\377\377\377\377\377\255\215\257\035\377\377\377\177\231\326\250\035\377\377\377\377\377\377\377\177')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=651, + serialized_end=796, +) + + +_CUSTOMOPTIONOTHERVALUES = _descriptor.Descriptor( + name='CustomOptionOtherValues', + full_name='protobuf_unittest.CustomOptionOtherValues', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\350\306\262\035\234\377\377\377\377\377\377\377\377\001\365\337\243\035\347\207EA\351\334\242\035\373Y\214B\312\300\363?\252\334\242\035\016Hello, \"World\"\262\331\242\035\013Hello\000World\210\331\242\035\351\377\377\377\377\377\377\377\377\001')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=798, + serialized_end=908, +) + + +_SETTINGREALSFROMPOSITIVEINTS = _descriptor.Descriptor( + name='SettingRealsFromPositiveInts', + full_name='protobuf_unittest.SettingRealsFromPositiveInts', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\365\337\243\035\000\000@A\351\334\242\035\000\000\000\000\000@c@')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=910, + serialized_end=962, +) + + +_SETTINGREALSFROMNEGATIVEINTS = _descriptor.Descriptor( + name='SettingRealsFromNegativeInts', + full_name='protobuf_unittest.SettingRealsFromNegativeInts', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\365\337\243\035\000\000@\301\351\334\242\035\000\000\000\000\000@c\300')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=964, + serialized_end=1016, +) + + +_COMPLEXOPTIONTYPE1 = _descriptor.Descriptor( + name='ComplexOptionType1', + full_name='protobuf_unittest.ComplexOptionType1', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='foo', full_name='protobuf_unittest.ComplexOptionType1.foo', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo2', full_name='protobuf_unittest.ComplexOptionType1.foo2', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo3', full_name='protobuf_unittest.ComplexOptionType1.foo3', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='foo4', full_name='protobuf_unittest.ComplexOptionType1.foo4', index=3, + number=4, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(100, 536870912), ], + oneofs=[ + ], + serialized_start=1018, + serialized_end=1103, +) + + +_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4 = _descriptor.Descriptor( + name='ComplexOptionType4', + full_name='protobuf_unittest.ComplexOptionType2.ComplexOptionType4', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='waldo', full_name='protobuf_unittest.ComplexOptionType2.ComplexOptionType4.waldo', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='complex_opt4', full_name='protobuf_unittest.ComplexOptionType2.ComplexOptionType4.complex_opt4', index=0, + number=7633546, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1340, + serialized_end=1491, +) + +_COMPLEXOPTIONTYPE2 = _descriptor.Descriptor( + name='ComplexOptionType2', + full_name='protobuf_unittest.ComplexOptionType2', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bar', full_name='protobuf_unittest.ComplexOptionType2.bar', 
index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='baz', full_name='protobuf_unittest.ComplexOptionType2.baz', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='fred', full_name='protobuf_unittest.ComplexOptionType2.fred', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='barney', full_name='protobuf_unittest.ComplexOptionType2.barney', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4, ], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(100, 536870912), ], + oneofs=[ + ], + serialized_start=1106, + serialized_end=1501, +) + + +_COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5 = _descriptor.Descriptor( + name='ComplexOptionType5', + full_name='protobuf_unittest.ComplexOptionType3.ComplexOptionType5', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='plugh', full_name='protobuf_unittest.ComplexOptionType3.ComplexOptionType5.plugh', index=0, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1625, + serialized_end=1660, +) + +_COMPLEXOPTIONTYPE3 = _descriptor.Descriptor( + name='ComplexOptionType3', + full_name='protobuf_unittest.ComplexOptionType3', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='qux', full_name='protobuf_unittest.ComplexOptionType3.qux', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='complexoptiontype5', full_name='protobuf_unittest.ComplexOptionType3.complexoptiontype5', index=1, + number=2, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1504, + serialized_end=1660, +) + + +_COMPLEXOPT6 = _descriptor.Descriptor( + name='ComplexOpt6', + full_name='protobuf_unittest.ComplexOpt6', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='xyzzy', full_name='protobuf_unittest.ComplexOpt6.xyzzy', index=0, + number=7593951, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=1662, + serialized_end=1693, +) + + +_VARIOUSCOMPLEXOPTIONS = _descriptor.Descriptor( + name='VariousComplexOptions', + full_name='protobuf_unittest.VariousComplexOptions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\242\342\225\035\002\010*\242\342\225\035\006\330\205\236\035\304\002\242\342\225\035\010\222\365\235\035\003\010\354\006\242\342\225\035\002 c\242\342\225\035\002 X\252\375\220\035\003\020\333\007\252\375\220\035\006\370\346\227\035\216\005\252\375\220\035\005\n\003\010\347\005\252\375\220\035\010\n\006\330\205\236\035\317\017\252\375\220\035\n\n\010\222\365\235\035\003\010\330\017\252\375\220\035\010\302\254\227\035\003\010\345\005\252\375\220\035\013\302\254\227\035\006\330\205\236\035\316\017\252\375\220\035\r\302\254\227\035\010\222\365\235\035\003\010\311\020\322\250\217\035\003\010\263\017\252\375\220\035\005\032\003\010\301\002\252\375\220\035\004\"\002\010e\252\375\220\035\005\"\003\010\324\001\372\336\220\035\002\010\t\372\336\220\035\004\023\030\026\024\343\334\374\034\370\375\373\034\030\344\334\374\034')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1696, + serialized_end=1937, +) + + +_AGGREGATEMESSAGESET = _descriptor.Descriptor( + name='AggregateMessageSet', + full_name='protobuf_unittest.AggregateMessageSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')), + is_extendable=True, + syntax='proto2', + extension_ranges=[(4, 2147483647), ], + oneofs=[ + ], + serialized_start=1939, + serialized_end=1974, +) + + +_AGGREGATEMESSAGESETELEMENT = _descriptor.Descriptor( + name='AggregateMessageSetElement', 
+ full_name='protobuf_unittest.AggregateMessageSetElement', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='s', full_name='protobuf_unittest.AggregateMessageSetElement.s', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='message_set_extension', full_name='protobuf_unittest.AggregateMessageSetElement.message_set_extension', index=0, + number=15447542, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1977, + serialized_end=2137, +) + + +_AGGREGATE = _descriptor.Descriptor( + name='Aggregate', + full_name='protobuf_unittest.Aggregate', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='i', full_name='protobuf_unittest.Aggregate.i', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='s', full_name='protobuf_unittest.Aggregate.s', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sub', full_name='protobuf_unittest.Aggregate.sub', index=2, + number=3, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='file', full_name='protobuf_unittest.Aggregate.file', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mset', full_name='protobuf_unittest.Aggregate.mset', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='nested', full_name='protobuf_unittest.Aggregate.nested', index=0, + number=15476903, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2140, + serialized_end=2393, +) + + +_AGGREGATEMESSAGE = _descriptor.Descriptor( + name='AggregateMessage', + full_name='protobuf_unittest.AggregateMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fieldname', full_name='protobuf_unittest.AggregateMessage.fieldname', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\362\241\207;\021\022\017FieldAnnotation'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ 
+ ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\302\321\206;\025\010e\022\021MessageAnnotation')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2395, + serialized_end=2484, +) + + +_NESTEDOPTIONTYPE_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='protobuf_unittest.NestedOptionType.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nested_field', full_name='protobuf_unittest.NestedOptionType.NestedMessage.nested_field', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\301\340\303\035\352\003\000\000\000\000\000\000'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\340\351\302\035\351\007')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2507, + serialized_end=2566, +) + +_NESTEDOPTIONTYPE = _descriptor.Descriptor( + name='NestedOptionType', + full_name='protobuf_unittest.NestedOptionType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + _descriptor.FieldDescriptor( + name='nested_extension', full_name='protobuf_unittest.NestedOptionType.nested_extension', index=0, + number=7912573, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\213\312\035\355\007'))), + ], + nested_types=[_NESTEDOPTIONTYPE_NESTEDMESSAGE, ], + enum_types=[ + _NESTEDOPTIONTYPE_NESTEDENUM, + ], + options=None, 
+ is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2487, + serialized_end=2688, +) + + +_OLDOPTIONTYPE = _descriptor.Descriptor( + name='OldOptionType', + full_name='protobuf_unittest.OldOptionType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.OldOptionType.value', index=0, + number=1, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _OLDOPTIONTYPE_TESTENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2690, + serialized_end=2790, +) + + +_NEWOPTIONTYPE = _descriptor.Descriptor( + name='NewOptionType', + full_name='protobuf_unittest.NewOptionType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='protobuf_unittest.NewOptionType.value', index=0, + number=1, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _NEWOPTIONTYPE_TESTENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2792, + serialized_end=2907, +) + + +_TESTMESSAGEWITHREQUIREDENUMOPTION = _descriptor.Descriptor( + name='TestMessageWithRequiredEnumOption', + full_name='protobuf_unittest.TestMessageWithRequiredEnumOption', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), 
_b('\372\350\374\224\003\002\010\000')), + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2909, + serialized_end=2954, +) + +_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM.containing_type = _TESTMESSAGEWITHCUSTOMOPTIONS +_DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE.containing_type = _DUMMYMESSAGECONTAININGENUM +_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4.containing_type = _COMPLEXOPTIONTYPE2 +_COMPLEXOPTIONTYPE2.fields_by_name['bar'].message_type = _COMPLEXOPTIONTYPE1 +_COMPLEXOPTIONTYPE2.fields_by_name['fred'].message_type = _COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4 +_COMPLEXOPTIONTYPE2.fields_by_name['barney'].message_type = _COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4 +_COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5.containing_type = _COMPLEXOPTIONTYPE3 +_COMPLEXOPTIONTYPE3.fields_by_name['complexoptiontype5'].message_type = _COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5 +_AGGREGATE.fields_by_name['sub'].message_type = _AGGREGATE +_AGGREGATE.fields_by_name['file'].message_type = google_dot_protobuf_dot_descriptor__pb2._FILEOPTIONS +_AGGREGATE.fields_by_name['mset'].message_type = _AGGREGATEMESSAGESET +_NESTEDOPTIONTYPE_NESTEDMESSAGE.containing_type = _NESTEDOPTIONTYPE +_NESTEDOPTIONTYPE_NESTEDENUM.containing_type = _NESTEDOPTIONTYPE +_OLDOPTIONTYPE.fields_by_name['value'].enum_type = _OLDOPTIONTYPE_TESTENUM +_OLDOPTIONTYPE_TESTENUM.containing_type = _OLDOPTIONTYPE +_NEWOPTIONTYPE.fields_by_name['value'].enum_type = _NEWOPTIONTYPE_TESTENUM +_NEWOPTIONTYPE_TESTENUM.containing_type = _NEWOPTIONTYPE +DESCRIPTOR.message_types_by_name['TestMessageWithCustomOptions'] = _TESTMESSAGEWITHCUSTOMOPTIONS +DESCRIPTOR.message_types_by_name['CustomOptionFooRequest'] = _CUSTOMOPTIONFOOREQUEST +DESCRIPTOR.message_types_by_name['CustomOptionFooResponse'] = _CUSTOMOPTIONFOORESPONSE +DESCRIPTOR.message_types_by_name['CustomOptionFooClientMessage'] = _CUSTOMOPTIONFOOCLIENTMESSAGE +DESCRIPTOR.message_types_by_name['CustomOptionFooServerMessage'] = _CUSTOMOPTIONFOOSERVERMESSAGE 
+DESCRIPTOR.message_types_by_name['DummyMessageContainingEnum'] = _DUMMYMESSAGECONTAININGENUM +DESCRIPTOR.message_types_by_name['DummyMessageInvalidAsOptionType'] = _DUMMYMESSAGEINVALIDASOPTIONTYPE +DESCRIPTOR.message_types_by_name['CustomOptionMinIntegerValues'] = _CUSTOMOPTIONMININTEGERVALUES +DESCRIPTOR.message_types_by_name['CustomOptionMaxIntegerValues'] = _CUSTOMOPTIONMAXINTEGERVALUES +DESCRIPTOR.message_types_by_name['CustomOptionOtherValues'] = _CUSTOMOPTIONOTHERVALUES +DESCRIPTOR.message_types_by_name['SettingRealsFromPositiveInts'] = _SETTINGREALSFROMPOSITIVEINTS +DESCRIPTOR.message_types_by_name['SettingRealsFromNegativeInts'] = _SETTINGREALSFROMNEGATIVEINTS +DESCRIPTOR.message_types_by_name['ComplexOptionType1'] = _COMPLEXOPTIONTYPE1 +DESCRIPTOR.message_types_by_name['ComplexOptionType2'] = _COMPLEXOPTIONTYPE2 +DESCRIPTOR.message_types_by_name['ComplexOptionType3'] = _COMPLEXOPTIONTYPE3 +DESCRIPTOR.message_types_by_name['ComplexOpt6'] = _COMPLEXOPT6 +DESCRIPTOR.message_types_by_name['VariousComplexOptions'] = _VARIOUSCOMPLEXOPTIONS +DESCRIPTOR.message_types_by_name['AggregateMessageSet'] = _AGGREGATEMESSAGESET +DESCRIPTOR.message_types_by_name['AggregateMessageSetElement'] = _AGGREGATEMESSAGESETELEMENT +DESCRIPTOR.message_types_by_name['Aggregate'] = _AGGREGATE +DESCRIPTOR.message_types_by_name['AggregateMessage'] = _AGGREGATEMESSAGE +DESCRIPTOR.message_types_by_name['NestedOptionType'] = _NESTEDOPTIONTYPE +DESCRIPTOR.message_types_by_name['OldOptionType'] = _OLDOPTIONTYPE +DESCRIPTOR.message_types_by_name['NewOptionType'] = _NEWOPTIONTYPE +DESCRIPTOR.message_types_by_name['TestMessageWithRequiredEnumOption'] = _TESTMESSAGEWITHREQUIREDENUMOPTION +DESCRIPTOR.enum_types_by_name['MethodOpt1'] = _METHODOPT1 +DESCRIPTOR.enum_types_by_name['AggregateEnum'] = _AGGREGATEENUM +DESCRIPTOR.extensions_by_name['file_opt1'] = file_opt1 +DESCRIPTOR.extensions_by_name['message_opt1'] = message_opt1 +DESCRIPTOR.extensions_by_name['field_opt1'] = field_opt1 
+DESCRIPTOR.extensions_by_name['field_opt2'] = field_opt2 +DESCRIPTOR.extensions_by_name['enum_opt1'] = enum_opt1 +DESCRIPTOR.extensions_by_name['enum_value_opt1'] = enum_value_opt1 +DESCRIPTOR.extensions_by_name['service_opt1'] = service_opt1 +DESCRIPTOR.extensions_by_name['method_opt1'] = method_opt1 +DESCRIPTOR.extensions_by_name['bool_opt'] = bool_opt +DESCRIPTOR.extensions_by_name['int32_opt'] = int32_opt +DESCRIPTOR.extensions_by_name['int64_opt'] = int64_opt +DESCRIPTOR.extensions_by_name['uint32_opt'] = uint32_opt +DESCRIPTOR.extensions_by_name['uint64_opt'] = uint64_opt +DESCRIPTOR.extensions_by_name['sint32_opt'] = sint32_opt +DESCRIPTOR.extensions_by_name['sint64_opt'] = sint64_opt +DESCRIPTOR.extensions_by_name['fixed32_opt'] = fixed32_opt +DESCRIPTOR.extensions_by_name['fixed64_opt'] = fixed64_opt +DESCRIPTOR.extensions_by_name['sfixed32_opt'] = sfixed32_opt +DESCRIPTOR.extensions_by_name['sfixed64_opt'] = sfixed64_opt +DESCRIPTOR.extensions_by_name['float_opt'] = float_opt +DESCRIPTOR.extensions_by_name['double_opt'] = double_opt +DESCRIPTOR.extensions_by_name['string_opt'] = string_opt +DESCRIPTOR.extensions_by_name['bytes_opt'] = bytes_opt +DESCRIPTOR.extensions_by_name['enum_opt'] = enum_opt +DESCRIPTOR.extensions_by_name['message_type_opt'] = message_type_opt +DESCRIPTOR.extensions_by_name['quux'] = quux +DESCRIPTOR.extensions_by_name['corge'] = corge +DESCRIPTOR.extensions_by_name['grault'] = grault +DESCRIPTOR.extensions_by_name['garply'] = garply +DESCRIPTOR.extensions_by_name['complex_opt1'] = complex_opt1 +DESCRIPTOR.extensions_by_name['complex_opt2'] = complex_opt2 +DESCRIPTOR.extensions_by_name['complex_opt3'] = complex_opt3 +DESCRIPTOR.extensions_by_name['complexopt6'] = complexopt6 +DESCRIPTOR.extensions_by_name['fileopt'] = fileopt +DESCRIPTOR.extensions_by_name['msgopt'] = msgopt +DESCRIPTOR.extensions_by_name['fieldopt'] = fieldopt +DESCRIPTOR.extensions_by_name['enumopt'] = enumopt +DESCRIPTOR.extensions_by_name['enumvalopt'] = 
enumvalopt +DESCRIPTOR.extensions_by_name['serviceopt'] = serviceopt +DESCRIPTOR.extensions_by_name['methodopt'] = methodopt +DESCRIPTOR.extensions_by_name['required_enum_opt'] = required_enum_opt + +TestMessageWithCustomOptions = _reflection.GeneratedProtocolMessageType('TestMessageWithCustomOptions', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGEWITHCUSTOMOPTIONS, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageWithCustomOptions) + )) +_sym_db.RegisterMessage(TestMessageWithCustomOptions) + +CustomOptionFooRequest = _reflection.GeneratedProtocolMessageType('CustomOptionFooRequest', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMOPTIONFOOREQUEST, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionFooRequest) + )) +_sym_db.RegisterMessage(CustomOptionFooRequest) + +CustomOptionFooResponse = _reflection.GeneratedProtocolMessageType('CustomOptionFooResponse', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMOPTIONFOORESPONSE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionFooResponse) + )) +_sym_db.RegisterMessage(CustomOptionFooResponse) + +CustomOptionFooClientMessage = _reflection.GeneratedProtocolMessageType('CustomOptionFooClientMessage', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMOPTIONFOOCLIENTMESSAGE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionFooClientMessage) + )) +_sym_db.RegisterMessage(CustomOptionFooClientMessage) + +CustomOptionFooServerMessage = _reflection.GeneratedProtocolMessageType('CustomOptionFooServerMessage', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMOPTIONFOOSERVERMESSAGE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # 
@@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionFooServerMessage) + )) +_sym_db.RegisterMessage(CustomOptionFooServerMessage) + +DummyMessageContainingEnum = _reflection.GeneratedProtocolMessageType('DummyMessageContainingEnum', (_message.Message,), dict( + DESCRIPTOR = _DUMMYMESSAGECONTAININGENUM, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.DummyMessageContainingEnum) + )) +_sym_db.RegisterMessage(DummyMessageContainingEnum) + +DummyMessageInvalidAsOptionType = _reflection.GeneratedProtocolMessageType('DummyMessageInvalidAsOptionType', (_message.Message,), dict( + DESCRIPTOR = _DUMMYMESSAGEINVALIDASOPTIONTYPE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.DummyMessageInvalidAsOptionType) + )) +_sym_db.RegisterMessage(DummyMessageInvalidAsOptionType) + +CustomOptionMinIntegerValues = _reflection.GeneratedProtocolMessageType('CustomOptionMinIntegerValues', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMOPTIONMININTEGERVALUES, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionMinIntegerValues) + )) +_sym_db.RegisterMessage(CustomOptionMinIntegerValues) + +CustomOptionMaxIntegerValues = _reflection.GeneratedProtocolMessageType('CustomOptionMaxIntegerValues', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMOPTIONMAXINTEGERVALUES, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionMaxIntegerValues) + )) +_sym_db.RegisterMessage(CustomOptionMaxIntegerValues) + +CustomOptionOtherValues = _reflection.GeneratedProtocolMessageType('CustomOptionOtherValues', (_message.Message,), dict( + DESCRIPTOR = _CUSTOMOPTIONOTHERVALUES, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # 
@@protoc_insertion_point(class_scope:protobuf_unittest.CustomOptionOtherValues) + )) +_sym_db.RegisterMessage(CustomOptionOtherValues) + +SettingRealsFromPositiveInts = _reflection.GeneratedProtocolMessageType('SettingRealsFromPositiveInts', (_message.Message,), dict( + DESCRIPTOR = _SETTINGREALSFROMPOSITIVEINTS, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.SettingRealsFromPositiveInts) + )) +_sym_db.RegisterMessage(SettingRealsFromPositiveInts) + +SettingRealsFromNegativeInts = _reflection.GeneratedProtocolMessageType('SettingRealsFromNegativeInts', (_message.Message,), dict( + DESCRIPTOR = _SETTINGREALSFROMNEGATIVEINTS, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.SettingRealsFromNegativeInts) + )) +_sym_db.RegisterMessage(SettingRealsFromNegativeInts) + +ComplexOptionType1 = _reflection.GeneratedProtocolMessageType('ComplexOptionType1', (_message.Message,), dict( + DESCRIPTOR = _COMPLEXOPTIONTYPE1, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType1) + )) +_sym_db.RegisterMessage(ComplexOptionType1) + +ComplexOptionType2 = _reflection.GeneratedProtocolMessageType('ComplexOptionType2', (_message.Message,), dict( + + ComplexOptionType4 = _reflection.GeneratedProtocolMessageType('ComplexOptionType4', (_message.Message,), dict( + DESCRIPTOR = _COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType2.ComplexOptionType4) + )) + , + DESCRIPTOR = _COMPLEXOPTIONTYPE2, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType2) + )) +_sym_db.RegisterMessage(ComplexOptionType2) +_sym_db.RegisterMessage(ComplexOptionType2.ComplexOptionType4) + 
+ComplexOptionType3 = _reflection.GeneratedProtocolMessageType('ComplexOptionType3', (_message.Message,), dict( + + ComplexOptionType5 = _reflection.GeneratedProtocolMessageType('ComplexOptionType5', (_message.Message,), dict( + DESCRIPTOR = _COMPLEXOPTIONTYPE3_COMPLEXOPTIONTYPE5, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType3.ComplexOptionType5) + )) + , + DESCRIPTOR = _COMPLEXOPTIONTYPE3, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOptionType3) + )) +_sym_db.RegisterMessage(ComplexOptionType3) +_sym_db.RegisterMessage(ComplexOptionType3.ComplexOptionType5) + +ComplexOpt6 = _reflection.GeneratedProtocolMessageType('ComplexOpt6', (_message.Message,), dict( + DESCRIPTOR = _COMPLEXOPT6, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.ComplexOpt6) + )) +_sym_db.RegisterMessage(ComplexOpt6) + +VariousComplexOptions = _reflection.GeneratedProtocolMessageType('VariousComplexOptions', (_message.Message,), dict( + DESCRIPTOR = _VARIOUSCOMPLEXOPTIONS, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.VariousComplexOptions) + )) +_sym_db.RegisterMessage(VariousComplexOptions) + +AggregateMessageSet = _reflection.GeneratedProtocolMessageType('AggregateMessageSet', (_message.Message,), dict( + DESCRIPTOR = _AGGREGATEMESSAGESET, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.AggregateMessageSet) + )) +_sym_db.RegisterMessage(AggregateMessageSet) + +AggregateMessageSetElement = _reflection.GeneratedProtocolMessageType('AggregateMessageSetElement', (_message.Message,), dict( + DESCRIPTOR = _AGGREGATEMESSAGESETELEMENT, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # 
@@protoc_insertion_point(class_scope:protobuf_unittest.AggregateMessageSetElement) + )) +_sym_db.RegisterMessage(AggregateMessageSetElement) + +Aggregate = _reflection.GeneratedProtocolMessageType('Aggregate', (_message.Message,), dict( + DESCRIPTOR = _AGGREGATE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.Aggregate) + )) +_sym_db.RegisterMessage(Aggregate) + +AggregateMessage = _reflection.GeneratedProtocolMessageType('AggregateMessage', (_message.Message,), dict( + DESCRIPTOR = _AGGREGATEMESSAGE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.AggregateMessage) + )) +_sym_db.RegisterMessage(AggregateMessage) + +NestedOptionType = _reflection.GeneratedProtocolMessageType('NestedOptionType', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _NESTEDOPTIONTYPE_NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.NestedOptionType.NestedMessage) + )) + , + DESCRIPTOR = _NESTEDOPTIONTYPE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.NestedOptionType) + )) +_sym_db.RegisterMessage(NestedOptionType) +_sym_db.RegisterMessage(NestedOptionType.NestedMessage) + +OldOptionType = _reflection.GeneratedProtocolMessageType('OldOptionType', (_message.Message,), dict( + DESCRIPTOR = _OLDOPTIONTYPE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.OldOptionType) + )) +_sym_db.RegisterMessage(OldOptionType) + +NewOptionType = _reflection.GeneratedProtocolMessageType('NewOptionType', (_message.Message,), dict( + DESCRIPTOR = _NEWOPTIONTYPE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # 
@@protoc_insertion_point(class_scope:protobuf_unittest.NewOptionType) + )) +_sym_db.RegisterMessage(NewOptionType) + +TestMessageWithRequiredEnumOption = _reflection.GeneratedProtocolMessageType('TestMessageWithRequiredEnumOption', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGEWITHREQUIREDENUMOPTION, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageWithRequiredEnumOption) + )) +_sym_db.RegisterMessage(TestMessageWithRequiredEnumOption) + +google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(file_opt1) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(message_opt1) +google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field_opt1) +google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field_opt2) +google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_opt1) +google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enum_value_opt1) +google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(service_opt1) +method_opt1.enum_type = _METHODOPT1 +google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(method_opt1) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(bool_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(int32_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(int64_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(uint32_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(uint64_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sint32_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sint64_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(fixed32_opt) 
+google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(fixed64_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sfixed32_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sfixed64_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(float_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(double_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(string_opt) +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(bytes_opt) +enum_opt.enum_type = _DUMMYMESSAGECONTAININGENUM_TESTENUMTYPE +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(enum_opt) +message_type_opt.message_type = _DUMMYMESSAGEINVALIDASOPTIONTYPE +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(message_type_opt) +ComplexOptionType1.RegisterExtension(quux) +corge.message_type = _COMPLEXOPTIONTYPE3 +ComplexOptionType1.RegisterExtension(corge) +ComplexOptionType2.RegisterExtension(grault) +garply.message_type = _COMPLEXOPTIONTYPE1 +ComplexOptionType2.RegisterExtension(garply) +complex_opt1.message_type = _COMPLEXOPTIONTYPE1 +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(complex_opt1) +complex_opt2.message_type = _COMPLEXOPTIONTYPE2 +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(complex_opt2) +complex_opt3.message_type = _COMPLEXOPTIONTYPE3 +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(complex_opt3) +complexopt6.message_type = _COMPLEXOPT6 +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(complexopt6) +fileopt.message_type = _AGGREGATE +google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(fileopt) +msgopt.message_type = _AGGREGATE +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(msgopt) +fieldopt.message_type = _AGGREGATE 
+google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(fieldopt) +enumopt.message_type = _AGGREGATE +google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enumopt) +enumvalopt.message_type = _AGGREGATE +google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enumvalopt) +serviceopt.message_type = _AGGREGATE +google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(serviceopt) +methodopt.message_type = _AGGREGATE +google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(methodopt) +required_enum_opt.message_type = _OLDOPTIONTYPE +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(required_enum_opt) +_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4.extensions_by_name['complex_opt4'].message_type = _COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4 +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(_COMPLEXOPTIONTYPE2_COMPLEXOPTIONTYPE4.extensions_by_name['complex_opt4']) +_AGGREGATEMESSAGESETELEMENT.extensions_by_name['message_set_extension'].message_type = _AGGREGATEMESSAGESETELEMENT +AggregateMessageSet.RegisterExtension(_AGGREGATEMESSAGESETELEMENT.extensions_by_name['message_set_extension']) +_AGGREGATE.extensions_by_name['nested'].message_type = _AGGREGATE +google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(_AGGREGATE.extensions_by_name['nested']) +google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(_NESTEDOPTIONTYPE.extensions_by_name['nested_extension']) + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\200\001\001\210\001\001\220\001\001\360\350\301\035\352\255\300\345$\372\354\205;p\010d\022\016FileAnnotation\032\026\022\024NestedFileAnnotation\"\036\372\354\205;\031\022\027FileExtensionAnnotation*$\013\020\366\353\256\007\032\033\n\031EmbeddedMessageSetElement\014')) +_AGGREGATEENUM.has_options = True +_AGGREGATEENUM._options = 
_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\222\225\210;\020\022\016EnumAnnotation')) +_AGGREGATEENUM.values_by_name["VALUE"].has_options = True +_AGGREGATEENUM.values_by_name["VALUE"]._options = _descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\312\374\211;\025\022\023EnumValueAnnotation')) +_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM.has_options = True +_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM._options = _descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\305\366\311\035\353\374\377\377')) +_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM.values_by_name["ANENUM_VAL2"].has_options = True +_TESTMESSAGEWITHCUSTOMOPTIONS_ANENUM.values_by_name["ANENUM_VAL2"]._options = _descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\260\206\372\005{')) +_TESTMESSAGEWITHCUSTOMOPTIONS.fields_by_name['field1'].has_options = True +_TESTMESSAGEWITHCUSTOMOPTIONS.fields_by_name['field1']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001\301\340\303\035-\341u\n\002\000\000\000')) +_TESTMESSAGEWITHCUSTOMOPTIONS.has_options = True +_TESTMESSAGEWITHCUSTOMOPTIONS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\000\340\351\302\035\310\377\377\377\377\377\377\377\377\001')) +_CUSTOMOPTIONMININTEGERVALUES.has_options = True +_CUSTOMOPTIONMININTEGERVALUES._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\320\336\262\035\000\350\306\262\035\200\200\200\200\370\377\377\377\377\001\260\274\262\035\200\200\200\200\200\200\200\200\200\001\200\223\262\035\000\370\365\260\035\000\200\304\260\035\377\377\377\377\017\370\227\260\035\377\377\377\377\377\377\377\377\377\001\235\365\257\035\000\000\000\000\221\356\257\035\000\000\000\000\000\000\000\000\255\215\257\035\000\000\000\200\231\326\250\035\000\000\000\000\000\000\000\200')) +_CUSTOMOPTIONMAXINTEGERVALUES.has_options = True +_CUSTOMOPTIONMAXINTEGERVALUES._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), 
_b('\320\336\262\035\001\350\306\262\035\377\377\377\377\007\260\274\262\035\377\377\377\377\377\377\377\377\177\200\223\262\035\377\377\377\377\017\370\365\260\035\377\377\377\377\377\377\377\377\377\001\200\304\260\035\376\377\377\377\017\370\227\260\035\376\377\377\377\377\377\377\377\377\001\235\365\257\035\377\377\377\377\221\356\257\035\377\377\377\377\377\377\377\377\255\215\257\035\377\377\377\177\231\326\250\035\377\377\377\377\377\377\377\177')) +_CUSTOMOPTIONOTHERVALUES.has_options = True +_CUSTOMOPTIONOTHERVALUES._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\350\306\262\035\234\377\377\377\377\377\377\377\377\001\365\337\243\035\347\207EA\351\334\242\035\373Y\214B\312\300\363?\252\334\242\035\016Hello, \"World\"\262\331\242\035\013Hello\000World\210\331\242\035\351\377\377\377\377\377\377\377\377\001')) +_SETTINGREALSFROMPOSITIVEINTS.has_options = True +_SETTINGREALSFROMPOSITIVEINTS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\365\337\243\035\000\000@A\351\334\242\035\000\000\000\000\000@c@')) +_SETTINGREALSFROMNEGATIVEINTS.has_options = True +_SETTINGREALSFROMNEGATIVEINTS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\365\337\243\035\000\000@\301\351\334\242\035\000\000\000\000\000@c\300')) +_VARIOUSCOMPLEXOPTIONS.has_options = True +_VARIOUSCOMPLEXOPTIONS._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\242\342\225\035\002\010*\242\342\225\035\006\330\205\236\035\304\002\242\342\225\035\010\222\365\235\035\003\010\354\006\242\342\225\035\002 c\242\342\225\035\002 
X\252\375\220\035\003\020\333\007\252\375\220\035\006\370\346\227\035\216\005\252\375\220\035\005\n\003\010\347\005\252\375\220\035\010\n\006\330\205\236\035\317\017\252\375\220\035\n\n\010\222\365\235\035\003\010\330\017\252\375\220\035\010\302\254\227\035\003\010\345\005\252\375\220\035\013\302\254\227\035\006\330\205\236\035\316\017\252\375\220\035\r\302\254\227\035\010\222\365\235\035\003\010\311\020\322\250\217\035\003\010\263\017\252\375\220\035\005\032\003\010\301\002\252\375\220\035\004\"\002\010e\252\375\220\035\005\"\003\010\324\001\372\336\220\035\002\010\t\372\336\220\035\004\023\030\026\024\343\334\374\034\370\375\373\034\030\344\334\374\034')) +_AGGREGATEMESSAGESET.has_options = True +_AGGREGATEMESSAGESET._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')) +_AGGREGATEMESSAGE.fields_by_name['fieldname'].has_options = True +_AGGREGATEMESSAGE.fields_by_name['fieldname']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\362\241\207;\021\022\017FieldAnnotation')) +_AGGREGATEMESSAGE.has_options = True +_AGGREGATEMESSAGE._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\302\321\206;\025\010e\022\021MessageAnnotation')) +_NESTEDOPTIONTYPE_NESTEDMESSAGE.fields_by_name['nested_field'].has_options = True +_NESTEDOPTIONTYPE_NESTEDMESSAGE.fields_by_name['nested_field']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\301\340\303\035\352\003\000\000\000\000\000\000')) +_NESTEDOPTIONTYPE_NESTEDMESSAGE.has_options = True +_NESTEDOPTIONTYPE_NESTEDMESSAGE._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\340\351\302\035\351\007')) +_NESTEDOPTIONTYPE_NESTEDENUM.has_options = True +_NESTEDOPTIONTYPE_NESTEDENUM._options = _descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\305\366\311\035\353\003\000\000')) +_NESTEDOPTIONTYPE_NESTEDENUM.values_by_name["NESTED_ENUM_VALUE"].has_options = True 
+_NESTEDOPTIONTYPE_NESTEDENUM.values_by_name["NESTED_ENUM_VALUE"]._options = _descriptor._ParseOptions(descriptor_pb2.EnumValueOptions(), _b('\260\206\372\005\354\007')) +_NESTEDOPTIONTYPE.extensions_by_name['nested_extension'].has_options = True +_NESTEDOPTIONTYPE.extensions_by_name['nested_extension']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\310\213\312\035\355\007')) +_TESTMESSAGEWITHREQUIREDENUMOPTION.has_options = True +_TESTMESSAGEWITHREQUIREDENUMOPTION._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\372\350\374\224\003\002\010\000')) + +_TESTSERVICEWITHCUSTOMOPTIONS = _descriptor.ServiceDescriptor( + name='TestServiceWithCustomOptions', + full_name='protobuf_unittest.TestServiceWithCustomOptions', + file=DESCRIPTOR, + index=0, + options=_descriptor._ParseOptions(descriptor_pb2.ServiceOptions(), _b('\220\262\213\036\323\333\200\313I')), + serialized_start=3092, + serialized_end=3234, + methods=[ + _descriptor.MethodDescriptor( + name='Foo', + full_name='protobuf_unittest.TestServiceWithCustomOptions.Foo', + index=0, + containing_service=None, + input_type=_CUSTOMOPTIONFOOREQUEST, + output_type=_CUSTOMOPTIONFOORESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\340\372\214\036\002')), + ), +]) + +TestServiceWithCustomOptions = service_reflection.GeneratedServiceType('TestServiceWithCustomOptions', (_service.Service,), dict( + DESCRIPTOR = _TESTSERVICEWITHCUSTOMOPTIONS, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + )) + +TestServiceWithCustomOptions_Stub = service_reflection.GeneratedServiceStubType('TestServiceWithCustomOptions_Stub', (TestServiceWithCustomOptions,), dict( + DESCRIPTOR = _TESTSERVICEWITHCUSTOMOPTIONS, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + )) + + + +_AGGREGATESERVICE = _descriptor.ServiceDescriptor( + name='AggregateService', + full_name='protobuf_unittest.AggregateService', + file=DESCRIPTOR, + index=1, + 
options=_descriptor._ParseOptions(descriptor_pb2.ServiceOptions(), _b('\312\373\216;\023\022\021ServiceAnnotation')), + serialized_start=3237, + serialized_end=3390, + methods=[ + _descriptor.MethodDescriptor( + name='Method', + full_name='protobuf_unittest.AggregateService.Method', + index=0, + containing_service=None, + input_type=_AGGREGATEMESSAGE, + output_type=_AGGREGATEMESSAGE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\312\310\226;\022\022\020MethodAnnotation')), + ), +]) + +AggregateService = service_reflection.GeneratedServiceType('AggregateService', (_service.Service,), dict( + DESCRIPTOR = _AGGREGATESERVICE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + )) + +AggregateService_Stub = service_reflection.GeneratedServiceStubType('AggregateService_Stub', (AggregateService,), dict( + DESCRIPTOR = _AGGREGATESERVICE, + __module__ = 'google.protobuf.unittest_custom_options_pb2' + )) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_import_pb2.py b/deps/google/protobuf/unittest_import_pb2.py new file mode 100644 index 00000000..0e80b85e --- /dev/null +++ b/deps/google/protobuf/unittest_import_pb2.py @@ -0,0 +1,137 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/unittest_import.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import unittest_import_public_pb2 as google_dot_protobuf_dot_unittest__import__public__pb2 + +from google.protobuf.unittest_import_public_pb2 import * + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_import.proto', + package='protobuf_unittest_import', + syntax='proto2', + serialized_pb=_b('\n%google/protobuf/unittest_import.proto\x12\x18protobuf_unittest_import\x1a,google/protobuf/unittest_import_public.proto\"\x1a\n\rImportMessage\x12\t\n\x01\x64\x18\x01 \x01(\x05*<\n\nImportEnum\x12\x0e\n\nIMPORT_FOO\x10\x07\x12\x0e\n\nIMPORT_BAR\x10\x08\x12\x0e\n\nIMPORT_BAZ\x10\t*1\n\x10ImportEnumForMap\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x42\x1f\n\x18\x63om.google.protobuf.testH\x01\xf8\x01\x01P\x00') + , + dependencies=[google_dot_protobuf_dot_unittest__import__public__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_IMPORTENUM = _descriptor.EnumDescriptor( + name='ImportEnum', + full_name='protobuf_unittest_import.ImportEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='IMPORT_FOO', index=0, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPORT_BAR', index=1, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPORT_BAZ', index=2, number=9, + options=None, + type=None), + ], + containing_type=None, + 
options=None, + serialized_start=141, + serialized_end=201, +) +_sym_db.RegisterEnumDescriptor(_IMPORTENUM) + +ImportEnum = enum_type_wrapper.EnumTypeWrapper(_IMPORTENUM) +_IMPORTENUMFORMAP = _descriptor.EnumDescriptor( + name='ImportEnumForMap', + full_name='protobuf_unittest_import.ImportEnumForMap', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOO', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAR', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=203, + serialized_end=252, +) +_sym_db.RegisterEnumDescriptor(_IMPORTENUMFORMAP) + +ImportEnumForMap = enum_type_wrapper.EnumTypeWrapper(_IMPORTENUMFORMAP) +IMPORT_FOO = 7 +IMPORT_BAR = 8 +IMPORT_BAZ = 9 +UNKNOWN = 0 +FOO = 1 +BAR = 2 + + + +_IMPORTMESSAGE = _descriptor.Descriptor( + name='ImportMessage', + full_name='protobuf_unittest_import.ImportMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='d', full_name='protobuf_unittest_import.ImportMessage.d', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=113, + serialized_end=139, +) + +DESCRIPTOR.message_types_by_name['ImportMessage'] = _IMPORTMESSAGE +DESCRIPTOR.enum_types_by_name['ImportEnum'] = _IMPORTENUM +DESCRIPTOR.enum_types_by_name['ImportEnumForMap'] = _IMPORTENUMFORMAP + +ImportMessage = _reflection.GeneratedProtocolMessageType('ImportMessage', (_message.Message,), dict( + DESCRIPTOR = 
_IMPORTMESSAGE, + __module__ = 'google.protobuf.unittest_import_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest_import.ImportMessage) + )) +_sym_db.RegisterMessage(ImportMessage) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030com.google.protobuf.testH\001\370\001\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_import_public_pb2.py b/deps/google/protobuf/unittest_import_public_pb2.py new file mode 100644 index 00000000..8627385f --- /dev/null +++ b/deps/google/protobuf/unittest_import_public_pb2.py @@ -0,0 +1,71 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/unittest_import_public.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_import_public.proto', + package='protobuf_unittest_import', + syntax='proto2', + serialized_pb=_b('\n,google/protobuf/unittest_import_public.proto\x12\x18protobuf_unittest_import\" \n\x13PublicImportMessage\x12\t\n\x01\x65\x18\x01 \x01(\x05\x42\x1a\n\x18\x63om.google.protobuf.test') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_PUBLICIMPORTMESSAGE = _descriptor.Descriptor( + name='PublicImportMessage', + full_name='protobuf_unittest_import.PublicImportMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='e', full_name='protobuf_unittest_import.PublicImportMessage.e', index=0, + number=1, type=5, cpp_type=1, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=74, + serialized_end=106, +) + +DESCRIPTOR.message_types_by_name['PublicImportMessage'] = _PUBLICIMPORTMESSAGE + +PublicImportMessage = _reflection.GeneratedProtocolMessageType('PublicImportMessage', (_message.Message,), dict( + DESCRIPTOR = _PUBLICIMPORTMESSAGE, + __module__ = 'google.protobuf.unittest_import_public_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest_import.PublicImportMessage) + )) +_sym_db.RegisterMessage(PublicImportMessage) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030com.google.protobuf.test')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_mset_pb2.py b/deps/google/protobuf/unittest_mset_pb2.py new file mode 100644 index 00000000..c76e4845 --- /dev/null +++ b/deps/google/protobuf/unittest_mset_pb2.py @@ -0,0 +1,256 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/unittest_mset.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import unittest_mset_wire_format_pb2 as google_dot_protobuf_dot_unittest__mset__wire__format__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_mset.proto', + package='protobuf_unittest', + syntax='proto2', + serialized_pb=_b('\n#google/protobuf/unittest_mset.proto\x12\x11protobuf_unittest\x1a/google/protobuf/unittest_mset_wire_format.proto\"Z\n\x17TestMessageSetContainer\x12?\n\x0bmessage_set\x18\x01 \x01(\x0b\x32*.proto2_wireformat_unittest.TestMessageSet\"\x9f\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32x\n\x15message_set_extension\x12*.proto2_wireformat_unittest.TestMessageSet\x18\xb0\xa6^ \x01(\x0b\x32+.protobuf_unittest.TestMessageSetExtension1\"\xa1\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2x\n\x15message_set_extension\x12*.proto2_wireformat_unittest.TestMessageSet\x18\xf9\xbb^ \x01(\x0b\x32+.protobuf_unittest.TestMessageSetExtension2\"n\n\rRawMessageSet\x12\x33\n\x04item\x18\x01 \x03(\n2%.protobuf_unittest.RawMessageSet.Item\x1a(\n\x04Item\x12\x0f\n\x07type_id\x18\x02 \x02(\x05\x12\x0f\n\x07message\x18\x03 \x02(\x0c\x42\x05H\x01\xf8\x01\x01') + , + dependencies=[google_dot_protobuf_dot_unittest__mset__wire__format__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_TESTMESSAGESETCONTAINER = _descriptor.Descriptor( + name='TestMessageSetContainer', + full_name='protobuf_unittest.TestMessageSetContainer', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message_set', full_name='protobuf_unittest.TestMessageSetContainer.message_set', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=107, + serialized_end=197, +) + + +_TESTMESSAGESETEXTENSION1 = _descriptor.Descriptor( + name='TestMessageSetExtension1', + full_name='protobuf_unittest.TestMessageSetExtension1', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='i', full_name='protobuf_unittest.TestMessageSetExtension1.i', index=0, + number=15, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='message_set_extension', full_name='protobuf_unittest.TestMessageSetExtension1.message_set_extension', index=0, + number=1545008, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=200, + serialized_end=359, +) + + +_TESTMESSAGESETEXTENSION2 = _descriptor.Descriptor( + name='TestMessageSetExtension2', + full_name='protobuf_unittest.TestMessageSetExtension2', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='str', 
full_name='protobuf_unittest.TestMessageSetExtension2.str', index=0, + number=25, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='message_set_extension', full_name='protobuf_unittest.TestMessageSetExtension2.message_set_extension', index=0, + number=1547769, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=362, + serialized_end=523, +) + + +_RAWMESSAGESET_ITEM = _descriptor.Descriptor( + name='Item', + full_name='protobuf_unittest.RawMessageSet.Item', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type_id', full_name='protobuf_unittest.RawMessageSet.Item.type_id', index=0, + number=2, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message', full_name='protobuf_unittest.RawMessageSet.Item.message', index=1, + number=3, type=12, cpp_type=9, label=2, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=595, + serialized_end=635, +) + +_RAWMESSAGESET = _descriptor.Descriptor( + name='RawMessageSet', + 
full_name='protobuf_unittest.RawMessageSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='item', full_name='protobuf_unittest.RawMessageSet.item', index=0, + number=1, type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_RAWMESSAGESET_ITEM, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=525, + serialized_end=635, +) + +_TESTMESSAGESETCONTAINER.fields_by_name['message_set'].message_type = google_dot_protobuf_dot_unittest__mset__wire__format__pb2._TESTMESSAGESET +_RAWMESSAGESET_ITEM.containing_type = _RAWMESSAGESET +_RAWMESSAGESET.fields_by_name['item'].message_type = _RAWMESSAGESET_ITEM +DESCRIPTOR.message_types_by_name['TestMessageSetContainer'] = _TESTMESSAGESETCONTAINER +DESCRIPTOR.message_types_by_name['TestMessageSetExtension1'] = _TESTMESSAGESETEXTENSION1 +DESCRIPTOR.message_types_by_name['TestMessageSetExtension2'] = _TESTMESSAGESETEXTENSION2 +DESCRIPTOR.message_types_by_name['RawMessageSet'] = _RAWMESSAGESET + +TestMessageSetContainer = _reflection.GeneratedProtocolMessageType('TestMessageSetContainer', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESETCONTAINER, + __module__ = 'google.protobuf.unittest_mset_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageSetContainer) + )) +_sym_db.RegisterMessage(TestMessageSetContainer) + +TestMessageSetExtension1 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension1', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESETEXTENSION1, + __module__ = 'google.protobuf.unittest_mset_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageSetExtension1) + )) +_sym_db.RegisterMessage(TestMessageSetExtension1) + 
+TestMessageSetExtension2 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension2', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESETEXTENSION2, + __module__ = 'google.protobuf.unittest_mset_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageSetExtension2) + )) +_sym_db.RegisterMessage(TestMessageSetExtension2) + +RawMessageSet = _reflection.GeneratedProtocolMessageType('RawMessageSet', (_message.Message,), dict( + + Item = _reflection.GeneratedProtocolMessageType('Item', (_message.Message,), dict( + DESCRIPTOR = _RAWMESSAGESET_ITEM, + __module__ = 'google.protobuf.unittest_mset_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.RawMessageSet.Item) + )) + , + DESCRIPTOR = _RAWMESSAGESET, + __module__ = 'google.protobuf.unittest_mset_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.RawMessageSet) + )) +_sym_db.RegisterMessage(RawMessageSet) +_sym_db.RegisterMessage(RawMessageSet.Item) + +_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION1 +google_dot_protobuf_dot_unittest__mset__wire__format__pb2.TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) +_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION2 +google_dot_protobuf_dot_unittest__mset__wire__format__pb2.TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\001\370\001\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_mset_wire_format_pb2.py b/deps/google/protobuf/unittest_mset_wire_format_pb2.py new file mode 100644 index 00000000..acab49c5 --- /dev/null +++ b/deps/google/protobuf/unittest_mset_wire_format_pb2.py @@ -0,0 +1,106 @@ +# Generated by the protocol buffer 
compiler. DO NOT EDIT! +# source: google/protobuf/unittest_mset_wire_format.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_mset_wire_format.proto', + package='proto2_wireformat_unittest', + syntax='proto2', + serialized_pb=_b('\n/google/protobuf/unittest_mset_wire_format.proto\x12\x1aproto2_wireformat_unittest\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"d\n!TestMessageSetWireFormatContainer\x12?\n\x0bmessage_set\x18\x01 \x01(\x0b\x32*.proto2_wireformat_unittest.TestMessageSetB)H\x01\xf8\x01\x01\xaa\x02!Google.ProtocolBuffers.TestProtos') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_TESTMESSAGESET = _descriptor.Descriptor( + name='TestMessageSet', + full_name='proto2_wireformat_unittest.TestMessageSet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')), + is_extendable=True, + syntax='proto2', + extension_ranges=[(4, 2147483647), ], + oneofs=[ + ], + serialized_start=79, + serialized_end=109, +) + + +_TESTMESSAGESETWIREFORMATCONTAINER = _descriptor.Descriptor( + name='TestMessageSetWireFormatContainer', + full_name='proto2_wireformat_unittest.TestMessageSetWireFormatContainer', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message_set', full_name='proto2_wireformat_unittest.TestMessageSetWireFormatContainer.message_set', 
index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=111, + serialized_end=211, +) + +_TESTMESSAGESETWIREFORMATCONTAINER.fields_by_name['message_set'].message_type = _TESTMESSAGESET +DESCRIPTOR.message_types_by_name['TestMessageSet'] = _TESTMESSAGESET +DESCRIPTOR.message_types_by_name['TestMessageSetWireFormatContainer'] = _TESTMESSAGESETWIREFORMATCONTAINER + +TestMessageSet = _reflection.GeneratedProtocolMessageType('TestMessageSet', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESET, + __module__ = 'google.protobuf.unittest_mset_wire_format_pb2' + # @@protoc_insertion_point(class_scope:proto2_wireformat_unittest.TestMessageSet) + )) +_sym_db.RegisterMessage(TestMessageSet) + +TestMessageSetWireFormatContainer = _reflection.GeneratedProtocolMessageType('TestMessageSetWireFormatContainer', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGESETWIREFORMATCONTAINER, + __module__ = 'google.protobuf.unittest_mset_wire_format_pb2' + # @@protoc_insertion_point(class_scope:proto2_wireformat_unittest.TestMessageSetWireFormatContainer) + )) +_sym_db.RegisterMessage(TestMessageSetWireFormatContainer) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\001\370\001\001\252\002!Google.ProtocolBuffers.TestProtos')) +_TESTMESSAGESET.has_options = True +_TESTMESSAGESET._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('\010\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_no_arena_import_pb2.py b/deps/google/protobuf/unittest_no_arena_import_pb2.py new file mode 100644 index 00000000..fb3ddc7c --- 
/dev/null +++ b/deps/google/protobuf/unittest_no_arena_import_pb2.py @@ -0,0 +1,69 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/unittest_no_arena_import.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_no_arena_import.proto', + package='proto2_arena_unittest', + syntax='proto2', + serialized_pb=_b('\n.google/protobuf/unittest_no_arena_import.proto\x12\x15proto2_arena_unittest\"\'\n\x1aImportNoArenaNestedMessage\x12\t\n\x01\x64\x18\x01 \x01(\x05') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_IMPORTNOARENANESTEDMESSAGE = _descriptor.Descriptor( + name='ImportNoArenaNestedMessage', + full_name='proto2_arena_unittest.ImportNoArenaNestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='d', full_name='proto2_arena_unittest.ImportNoArenaNestedMessage.d', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=73, + serialized_end=112, +) + +DESCRIPTOR.message_types_by_name['ImportNoArenaNestedMessage'] = _IMPORTNOARENANESTEDMESSAGE + +ImportNoArenaNestedMessage = _reflection.GeneratedProtocolMessageType('ImportNoArenaNestedMessage', 
(_message.Message,), dict( + DESCRIPTOR = _IMPORTNOARENANESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_no_arena_import_pb2' + # @@protoc_insertion_point(class_scope:proto2_arena_unittest.ImportNoArenaNestedMessage) + )) +_sym_db.RegisterMessage(ImportNoArenaNestedMessage) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_no_arena_pb2.py b/deps/google/protobuf/unittest_no_arena_pb2.py new file mode 100644 index 00000000..b42dbf9a --- /dev/null +++ b/deps/google/protobuf/unittest_no_arena_pb2.py @@ -0,0 +1,918 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/unittest_no_arena.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import unittest_import_pb2 as google_dot_protobuf_dot_unittest__import__pb2 +google_dot_protobuf_dot_unittest__import__public__pb2 = google_dot_protobuf_dot_unittest__import__pb2.google_dot_protobuf_dot_unittest__import__public__pb2 +from google.protobuf import unittest_arena_pb2 as google_dot_protobuf_dot_unittest__arena__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_no_arena.proto', + package='protobuf_unittest_no_arena', + syntax='proto2', + serialized_pb=_b('\n\'google/protobuf/unittest_no_arena.proto\x12\x1aprotobuf_unittest_no_arena\x1a%google/protobuf/unittest_import.proto\x1a$google/protobuf/unittest_arena.proto\"\xd0\x1a\n\x0cTestAllTypes\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 
\x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 \x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12M\n\roptionalgroup\x18\x10 \x01(\n26.protobuf_unittest_no_arena.TestAllTypes.OptionalGroup\x12W\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessage\x12L\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32*.protobuf_unittest_no_arena.ForeignMessage\x12H\n\x17optional_import_message\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12Q\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32\x33.protobuf_unittest_no_arena.TestAllTypes.NestedEnum\x12\x46\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\'.protobuf_unittest_no_arena.ForeignEnum\x12\x42\n\x14optional_import_enum\x18\x17 \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12U\n\x1eoptional_public_import_message\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage\x12T\n\x10optional_message\x18\x1b \x01(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! 
\x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12M\n\rrepeatedgroup\x18. \x03(\n26.protobuf_unittest_no_arena.TestAllTypes.RepeatedGroup\x12W\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessage\x12L\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32*.protobuf_unittest_no_arena.ForeignMessage\x12H\n\x17repeated_import_message\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12Q\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32\x33.protobuf_unittest_no_arena.TestAllTypes.NestedEnum\x12\x46\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\'.protobuf_unittest_no_arena.ForeignEnum\x12\x42\n\x14repeated_import_enum\x18\x35 \x03(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12Y\n\x15repeated_lazy_message\x18\x39 \x03(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessageB\x02(\x01\x12\x19\n\rdefault_int32\x18= \x01(\x05:\x02\x34\x31\x12\x19\n\rdefault_int64\x18> \x01(\x03:\x02\x34\x32\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint32\x18? 
\x01(\r:\x02\x34\x33\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint64\x18@ \x01(\x04:\x02\x34\x34\x12\x1b\n\x0e\x64\x65\x66\x61ult_sint32\x18\x41 \x01(\x11:\x03-45\x12\x1a\n\x0e\x64\x65\x66\x61ult_sint64\x18\x42 \x01(\x12:\x02\x34\x36\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed32\x18\x43 \x01(\x07:\x02\x34\x37\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed64\x18\x44 \x01(\x06:\x02\x34\x38\x12\x1c\n\x10\x64\x65\x66\x61ult_sfixed32\x18\x45 \x01(\x0f:\x02\x34\x39\x12\x1d\n\x10\x64\x65\x66\x61ult_sfixed64\x18\x46 \x01(\x10:\x03-50\x12\x1b\n\rdefault_float\x18G \x01(\x02:\x04\x35\x31.5\x12\x1d\n\x0e\x64\x65\x66\x61ult_double\x18H \x01(\x01:\x05\x35\x32\x30\x30\x30\x12\x1a\n\x0c\x64\x65\x66\x61ult_bool\x18I \x01(\x08:\x04true\x12\x1d\n\x0e\x64\x65\x66\x61ult_string\x18J \x01(\t:\x05hello\x12\x1c\n\rdefault_bytes\x18K \x01(\x0c:\x05world\x12U\n\x13\x64\x65\x66\x61ult_nested_enum\x18Q \x01(\x0e\x32\x33.protobuf_unittest_no_arena.TestAllTypes.NestedEnum:\x03\x42\x41R\x12R\n\x14\x64\x65\x66\x61ult_foreign_enum\x18R \x01(\x0e\x32\'.protobuf_unittest_no_arena.ForeignEnum:\x0b\x46OREIGN_BAR\x12M\n\x13\x64\x65\x66\x61ult_import_enum\x18S \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:\nIMPORT_BAR\x12%\n\x14\x64\x65\x66\x61ult_string_piece\x18T \x01(\t:\x03\x61\x62\x63\x42\x02\x08\x02\x12\x1d\n\x0c\x64\x65\x66\x61ult_cord\x18U \x01(\t:\x03\x31\x32\x33\x42\x02\x08\x01\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12V\n\x14oneof_nested_message\x18p \x01(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x12_\n\x19lazy_oneof_nested_message\x18s \x01(\x0b\x32\x36.protobuf_unittest_no_arena.TestAllTypes.NestedMessageB\x02(\x01H\x00\x1a\x1b\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x1a\x1a\n\rOptionalGroup\x12\t\n\x01\x61\x18\x11 \x01(\x05\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x61\x18/ 
\x01(\x05\"9\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\r\n\x0boneof_field\"\x1b\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\"P\n\x12TestNoArenaMessage\x12:\n\rarena_message\x18\x01 \x01(\x0b\x32#.proto2_arena_unittest.ArenaMessage*@\n\x0b\x46oreignEnum\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x04\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x05\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x06\x42%B\rUnittestProtoH\x01\x80\x01\x01\x88\x01\x01\x90\x01\x01\xf8\x01\x00\xa2\x02\x05NOARN') + , + dependencies=[google_dot_protobuf_dot_unittest__import__pb2.DESCRIPTOR,google_dot_protobuf_dot_unittest__arena__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_FOREIGNENUM = _descriptor.EnumDescriptor( + name='ForeignEnum', + full_name='protobuf_unittest_no_arena.ForeignEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOREIGN_FOO', index=0, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN_BAR', index=1, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN_BAZ', index=2, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3670, + serialized_end=3734, +) +_sym_db.RegisterEnumDescriptor(_FOREIGNENUM) + +ForeignEnum = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUM) +FOREIGN_FOO = 4 +FOREIGN_BAR = 5 +FOREIGN_BAZ = 6 + + +_TESTALLTYPES_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='protobuf_unittest_no_arena.TestAllTypes.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOO', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAR', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAZ', index=2, number=3, + options=None, + 
type=None), + _descriptor.EnumValueDescriptor( + name='NEG', index=3, number=-1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3485, + serialized_end=3542, +) +_sym_db.RegisterEnumDescriptor(_TESTALLTYPES_NESTEDENUM) + + +_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='protobuf_unittest_no_arena.TestAllTypes.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bb', full_name='protobuf_unittest_no_arena.TestAllTypes.NestedMessage.bb', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3400, + serialized_end=3427, +) + +_TESTALLTYPES_OPTIONALGROUP = _descriptor.Descriptor( + name='OptionalGroup', + full_name='protobuf_unittest_no_arena.TestAllTypes.OptionalGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest_no_arena.TestAllTypes.OptionalGroup.a', index=0, + number=17, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3429, + serialized_end=3455, +) + +_TESTALLTYPES_REPEATEDGROUP = _descriptor.Descriptor( + name='RepeatedGroup', + full_name='protobuf_unittest_no_arena.TestAllTypes.RepeatedGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + 
fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest_no_arena.TestAllTypes.RepeatedGroup.a', index=0, + number=47, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3457, + serialized_end=3483, +) + +_TESTALLTYPES = _descriptor.Descriptor( + name='TestAllTypes', + full_name='protobuf_unittest_no_arena.TestAllTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_int32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_int32', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_int64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_int64', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_uint32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_uint32', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_uint64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_uint64', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sint32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_sint32', index=4, + number=5, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sint64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_sint64', index=5, + number=6, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_fixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_fixed32', index=6, + number=7, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_fixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_fixed64', index=7, + number=8, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sfixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_sfixed32', index=8, + number=9, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sfixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_sfixed64', index=9, + number=10, type=16, cpp_type=2, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_float', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_float', index=10, + number=11, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_double', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_double', index=11, + number=12, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_bool', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_bool', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_string', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_string', index=13, + number=14, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_bytes', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_bytes', index=14, + number=15, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optionalgroup', 
full_name='protobuf_unittest_no_arena.TestAllTypes.optionalgroup', index=15, + number=16, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_nested_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_nested_message', index=16, + number=18, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_foreign_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_foreign_message', index=17, + number=19, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_import_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_import_message', index=18, + number=20, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_nested_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_nested_enum', index=19, + number=21, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_foreign_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_foreign_enum', index=20, + number=22, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=4, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_import_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_import_enum', index=21, + number=23, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=7, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_string_piece', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_string_piece', index=22, + number=24, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='optional_cord', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_cord', index=23, + number=25, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='optional_public_import_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_public_import_message', index=24, + number=26, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_message', full_name='protobuf_unittest_no_arena.TestAllTypes.optional_message', index=25, + number=27, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + _descriptor.FieldDescriptor( + name='repeated_int32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_int32', index=26, + number=31, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_int64', index=27, + number=32, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_uint32', index=28, + number=33, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_uint64', index=29, + number=34, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sint32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_sint32', index=30, + number=35, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sint64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_sint64', index=31, + number=36, type=18, 
cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_fixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_fixed32', index=32, + number=37, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_fixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_fixed64', index=33, + number=38, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sfixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_sfixed32', index=34, + number=39, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sfixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_sfixed64', index=35, + number=40, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_float', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_float', index=36, + number=41, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_double', 
full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_double', index=37, + number=42, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bool', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_bool', index=38, + number=43, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_string', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_string', index=39, + number=44, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bytes', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_bytes', index=40, + number=45, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeatedgroup', full_name='protobuf_unittest_no_arena.TestAllTypes.repeatedgroup', index=41, + number=46, type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_message', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_nested_message', index=42, + number=48, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + _descriptor.FieldDescriptor( + name='repeated_foreign_message', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_foreign_message', index=43, + number=49, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_import_message', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_import_message', index=44, + number=50, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_nested_enum', index=45, + number=51, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_foreign_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_foreign_enum', index=46, + number=52, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_import_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_import_enum', index=47, + number=53, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_string_piece', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_string_piece', index=48, + number=54, type=9, cpp_type=9, label=3, + 
has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='repeated_cord', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_cord', index=49, + number=55, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='repeated_lazy_message', full_name='protobuf_unittest_no_arena.TestAllTypes.repeated_lazy_message', index=50, + number=57, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + _descriptor.FieldDescriptor( + name='default_int32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_int32', index=51, + number=61, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=41, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_int64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_int64', index=52, + number=62, type=3, cpp_type=2, label=1, + has_default_value=True, default_value=42, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_uint32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_uint32', index=53, + number=63, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=43, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_uint64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_uint64', index=54, + number=64, type=4, cpp_type=4, label=1, + has_default_value=True, default_value=44, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sint32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_sint32', index=55, + number=65, type=17, cpp_type=1, label=1, + has_default_value=True, default_value=-45, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sint64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_sint64', index=56, + number=66, type=18, cpp_type=2, label=1, + has_default_value=True, default_value=46, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_fixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_fixed32', index=57, + number=67, type=7, cpp_type=3, label=1, + has_default_value=True, default_value=47, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_fixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_fixed64', index=58, + number=68, type=6, cpp_type=4, label=1, + has_default_value=True, default_value=48, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sfixed32', full_name='protobuf_unittest_no_arena.TestAllTypes.default_sfixed32', index=59, + number=69, type=15, cpp_type=1, label=1, + has_default_value=True, 
default_value=49, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sfixed64', full_name='protobuf_unittest_no_arena.TestAllTypes.default_sfixed64', index=60, + number=70, type=16, cpp_type=2, label=1, + has_default_value=True, default_value=-50, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_float', full_name='protobuf_unittest_no_arena.TestAllTypes.default_float', index=61, + number=71, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=51.5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_double', full_name='protobuf_unittest_no_arena.TestAllTypes.default_double', index=62, + number=72, type=1, cpp_type=5, label=1, + has_default_value=True, default_value=52000, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_bool', full_name='protobuf_unittest_no_arena.TestAllTypes.default_bool', index=63, + number=73, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_string', full_name='protobuf_unittest_no_arena.TestAllTypes.default_string', index=64, + number=74, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("hello").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_bytes', full_name='protobuf_unittest_no_arena.TestAllTypes.default_bytes', 
index=65, + number=75, type=12, cpp_type=9, label=1, + has_default_value=True, default_value=_b("world"), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_nested_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.default_nested_enum', index=66, + number=81, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_foreign_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.default_foreign_enum', index=67, + number=82, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_import_enum', full_name='protobuf_unittest_no_arena.TestAllTypes.default_import_enum', index=68, + number=83, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=8, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_string_piece', full_name='protobuf_unittest_no_arena.TestAllTypes.default_string_piece', index=69, + number=84, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("abc").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='default_cord', full_name='protobuf_unittest_no_arena.TestAllTypes.default_cord', index=70, + number=85, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("123").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='oneof_uint32', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_uint32', index=71, + number=111, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_nested_message', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_nested_message', index=72, + number=112, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_string', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_string', index=73, + number=113, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_bytes', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_bytes', index=74, + number=114, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='lazy_oneof_nested_message', full_name='protobuf_unittest_no_arena.TestAllTypes.lazy_oneof_nested_message', index=75, + number=115, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + ], + extensions=[ 
+ ], + nested_types=[_TESTALLTYPES_NESTEDMESSAGE, _TESTALLTYPES_OPTIONALGROUP, _TESTALLTYPES_REPEATEDGROUP, ], + enum_types=[ + _TESTALLTYPES_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='oneof_field', full_name='protobuf_unittest_no_arena.TestAllTypes.oneof_field', + index=0, containing_type=None, fields=[]), + ], + serialized_start=149, + serialized_end=3557, +) + + +_FOREIGNMESSAGE = _descriptor.Descriptor( + name='ForeignMessage', + full_name='protobuf_unittest_no_arena.ForeignMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='c', full_name='protobuf_unittest_no_arena.ForeignMessage.c', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3559, + serialized_end=3586, +) + + +_TESTNOARENAMESSAGE = _descriptor.Descriptor( + name='TestNoArenaMessage', + full_name='protobuf_unittest_no_arena.TestNoArenaMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='arena_message', full_name='protobuf_unittest_no_arena.TestNoArenaMessage.arena_message', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3588, + serialized_end=3668, +) + 
+_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES +_TESTALLTYPES_OPTIONALGROUP.containing_type = _TESTALLTYPES +_TESTALLTYPES_REPEATEDGROUP.containing_type = _TESTALLTYPES +_TESTALLTYPES.fields_by_name['optionalgroup'].message_type = _TESTALLTYPES_OPTIONALGROUP +_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGE +_TESTALLTYPES.fields_by_name['optional_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE +_TESTALLTYPES.fields_by_name['optional_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM +_TESTALLTYPES.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUM +_TESTALLTYPES.fields_by_name['optional_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +_TESTALLTYPES.fields_by_name['optional_public_import_message'].message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE +_TESTALLTYPES.fields_by_name['optional_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['repeatedgroup'].message_type = _TESTALLTYPES_REPEATEDGROUP +_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGE +_TESTALLTYPES.fields_by_name['repeated_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE +_TESTALLTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM +_TESTALLTYPES.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUM +_TESTALLTYPES.fields_by_name['repeated_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +_TESTALLTYPES.fields_by_name['repeated_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE 
+_TESTALLTYPES.fields_by_name['default_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM +_TESTALLTYPES.fields_by_name['default_foreign_enum'].enum_type = _FOREIGNENUM +_TESTALLTYPES.fields_by_name['default_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +_TESTALLTYPES.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['lazy_oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES_NESTEDENUM.containing_type = _TESTALLTYPES +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_uint32']) +_TESTALLTYPES.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_nested_message']) +_TESTALLTYPES.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_string']) +_TESTALLTYPES.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_bytes']) +_TESTALLTYPES.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['lazy_oneof_nested_message']) +_TESTALLTYPES.fields_by_name['lazy_oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTNOARENAMESSAGE.fields_by_name['arena_message'].message_type = google_dot_protobuf_dot_unittest__arena__pb2._ARENAMESSAGE +DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES +DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE +DESCRIPTOR.message_types_by_name['TestNoArenaMessage'] = 
_TESTNOARENAMESSAGE +DESCRIPTOR.enum_types_by_name['ForeignEnum'] = _FOREIGNENUM + +TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_no_arena_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestAllTypes.NestedMessage) + )) + , + + OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict( + DESCRIPTOR = _TESTALLTYPES_OPTIONALGROUP, + __module__ = 'google.protobuf.unittest_no_arena_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestAllTypes.OptionalGroup) + )) + , + + RepeatedGroup = _reflection.GeneratedProtocolMessageType('RepeatedGroup', (_message.Message,), dict( + DESCRIPTOR = _TESTALLTYPES_REPEATEDGROUP, + __module__ = 'google.protobuf.unittest_no_arena_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestAllTypes.RepeatedGroup) + )) + , + DESCRIPTOR = _TESTALLTYPES, + __module__ = 'google.protobuf.unittest_no_arena_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestAllTypes) + )) +_sym_db.RegisterMessage(TestAllTypes) +_sym_db.RegisterMessage(TestAllTypes.NestedMessage) +_sym_db.RegisterMessage(TestAllTypes.OptionalGroup) +_sym_db.RegisterMessage(TestAllTypes.RepeatedGroup) + +ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict( + DESCRIPTOR = _FOREIGNMESSAGE, + __module__ = 'google.protobuf.unittest_no_arena_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.ForeignMessage) + )) +_sym_db.RegisterMessage(ForeignMessage) + +TestNoArenaMessage = _reflection.GeneratedProtocolMessageType('TestNoArenaMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTNOARENAMESSAGE, + __module__ = 
'google.protobuf.unittest_no_arena_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest_no_arena.TestNoArenaMessage) + )) +_sym_db.RegisterMessage(TestNoArenaMessage) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('B\rUnittestProtoH\001\200\001\001\210\001\001\220\001\001\370\001\000\242\002\005NOARN')) +_TESTALLTYPES.fields_by_name['optional_string_piece'].has_options = True +_TESTALLTYPES.fields_by_name['optional_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTALLTYPES.fields_by_name['optional_cord'].has_options = True +_TESTALLTYPES.fields_by_name['optional_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTALLTYPES.fields_by_name['optional_message'].has_options = True +_TESTALLTYPES.fields_by_name['optional_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +_TESTALLTYPES.fields_by_name['repeated_string_piece'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTALLTYPES.fields_by_name['repeated_cord'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTALLTYPES.fields_by_name['repeated_lazy_message'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +_TESTALLTYPES.fields_by_name['default_string_piece'].has_options = True +_TESTALLTYPES.fields_by_name['default_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTALLTYPES.fields_by_name['default_cord'].has_options = True +_TESTALLTYPES.fields_by_name['default_cord']._options = 
_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTALLTYPES.fields_by_name['lazy_oneof_nested_message'].has_options = True +_TESTALLTYPES.fields_by_name['lazy_oneof_nested_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_no_generic_services_pb2.py b/deps/google/protobuf/unittest_no_generic_services_pb2.py new file mode 100644 index 00000000..d40ba3bf --- /dev/null +++ b/deps/google/protobuf/unittest_no_generic_services_pb2.py @@ -0,0 +1,101 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/unittest_no_generic_services.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_no_generic_services.proto', + package='google.protobuf.no_generic_services_test', + syntax='proto2', + serialized_pb=_b('\n2google/protobuf/unittest_no_generic_services.proto\x12(google.protobuf.no_generic_services_test\"#\n\x0bTestMessage\x12\t\n\x01\x61\x18\x01 \x01(\x05*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02*\x13\n\x08TestEnum\x12\x07\n\x03\x46OO\x10\x01\x32\x82\x01\n\x0bTestService\x12s\n\x03\x46oo\x12\x35.google.protobuf.no_generic_services_test.TestMessage\x1a\x35.google.protobuf.no_generic_services_test.TestMessage:N\n\x0etest_extension\x12\x35.google.protobuf.no_generic_services_test.TestMessage\x18\xe8\x07 \x01(\x05') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_TESTENUM = 
_descriptor.EnumDescriptor( + name='TestEnum', + full_name='google.protobuf.no_generic_services_test.TestEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOO', index=0, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=133, + serialized_end=152, +) +_sym_db.RegisterEnumDescriptor(_TESTENUM) + +TestEnum = enum_type_wrapper.EnumTypeWrapper(_TESTENUM) +FOO = 1 + +TEST_EXTENSION_FIELD_NUMBER = 1000 +test_extension = _descriptor.FieldDescriptor( + name='test_extension', full_name='google.protobuf.no_generic_services_test.test_extension', index=0, + number=1000, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) + + +_TESTMESSAGE = _descriptor.Descriptor( + name='TestMessage', + full_name='google.protobuf.no_generic_services_test.TestMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='google.protobuf.no_generic_services_test.TestMessage.a', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=96, + serialized_end=131, +) + +DESCRIPTOR.message_types_by_name['TestMessage'] = _TESTMESSAGE +DESCRIPTOR.enum_types_by_name['TestEnum'] = _TESTENUM +DESCRIPTOR.extensions_by_name['test_extension'] = test_extension + +TestMessage = _reflection.GeneratedProtocolMessageType('TestMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGE, + __module__ = 'google.protobuf.unittest_no_generic_services_pb2' + # 
@@protoc_insertion_point(class_scope:google.protobuf.no_generic_services_test.TestMessage) + )) +_sym_db.RegisterMessage(TestMessage) + +TestMessage.RegisterExtension(test_extension) + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_pb2.py b/deps/google/protobuf/unittest_pb2.py new file mode 100644 index 00000000..51376db8 --- /dev/null +++ b/deps/google/protobuf/unittest_pb2.py @@ -0,0 +1,6092 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/unittest.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import service as _service +from google.protobuf import service_reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import unittest_import_pb2 as google_dot_protobuf_dot_unittest__import__pb2 +google_dot_protobuf_dot_unittest__import__public__pb2 = google_dot_protobuf_dot_unittest__import__pb2.google_dot_protobuf_dot_unittest__import__public__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest.proto', + package='protobuf_unittest', + syntax='proto2', + serialized_pb=_b('\n\x1egoogle/protobuf/unittest.proto\x12\x11protobuf_unittest\x1a%google/protobuf/unittest_import.proto\"\xed\x18\n\x0cTestAllTypes\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 \x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 
\x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12\x44\n\roptionalgroup\x18\x10 \x01(\n2-.protobuf_unittest.TestAllTypes.OptionalGroup\x12N\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\x12\x43\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12H\n\x17optional_import_message\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12H\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum\x12=\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x42\n\x14optional_import_enum\x18\x17 \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12U\n\x1eoptional_public_import_message\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage\x12P\n\x15optional_lazy_message\x18\x1b \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! 
\x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12\x44\n\rrepeatedgroup\x18. \x03(\n2-.protobuf_unittest.TestAllTypes.RepeatedGroup\x12N\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\x12\x43\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\x12H\n\x17repeated_import_message\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12H\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum\x12=\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x42\n\x14repeated_import_enum\x18\x35 \x03(\x0e\x32$.protobuf_unittest_import.ImportEnum\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12P\n\x15repeated_lazy_message\x18\x39 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x19\n\rdefault_int32\x18= \x01(\x05:\x02\x34\x31\x12\x19\n\rdefault_int64\x18> \x01(\x03:\x02\x34\x32\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint32\x18? 
\x01(\r:\x02\x34\x33\x12\x1a\n\x0e\x64\x65\x66\x61ult_uint64\x18@ \x01(\x04:\x02\x34\x34\x12\x1b\n\x0e\x64\x65\x66\x61ult_sint32\x18\x41 \x01(\x11:\x03-45\x12\x1a\n\x0e\x64\x65\x66\x61ult_sint64\x18\x42 \x01(\x12:\x02\x34\x36\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed32\x18\x43 \x01(\x07:\x02\x34\x37\x12\x1b\n\x0f\x64\x65\x66\x61ult_fixed64\x18\x44 \x01(\x06:\x02\x34\x38\x12\x1c\n\x10\x64\x65\x66\x61ult_sfixed32\x18\x45 \x01(\x0f:\x02\x34\x39\x12\x1d\n\x10\x64\x65\x66\x61ult_sfixed64\x18\x46 \x01(\x10:\x03-50\x12\x1b\n\rdefault_float\x18G \x01(\x02:\x04\x35\x31.5\x12\x1d\n\x0e\x64\x65\x66\x61ult_double\x18H \x01(\x01:\x05\x35\x32\x30\x30\x30\x12\x1a\n\x0c\x64\x65\x66\x61ult_bool\x18I \x01(\x08:\x04true\x12\x1d\n\x0e\x64\x65\x66\x61ult_string\x18J \x01(\t:\x05hello\x12\x1c\n\rdefault_bytes\x18K \x01(\x0c:\x05world\x12L\n\x13\x64\x65\x66\x61ult_nested_enum\x18Q \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:\x03\x42\x41R\x12I\n\x14\x64\x65\x66\x61ult_foreign_enum\x18R \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:\x0b\x46OREIGN_BAR\x12M\n\x13\x64\x65\x66\x61ult_import_enum\x18S \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:\nIMPORT_BAR\x12%\n\x14\x64\x65\x66\x61ult_string_piece\x18T \x01(\t:\x03\x61\x62\x63\x42\x02\x08\x02\x12\x1d\n\x0c\x64\x65\x66\x61ult_cord\x18U \x01(\t:\x03\x31\x32\x33\x42\x02\x08\x01\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12M\n\x14oneof_nested_message\x18p \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x1a\x1b\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x1a\x1a\n\rOptionalGroup\x12\t\n\x01\x61\x18\x11 \x01(\x05\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x61\x18/ 
\x01(\x05\"9\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\r\n\x0boneof_field\"\xbb\x01\n\x12NestedTestAllTypes\x12\x34\n\x05\x63hild\x18\x01 \x01(\x0b\x32%.protobuf_unittest.NestedTestAllTypes\x12\x30\n\x07payload\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12=\n\x0erepeated_child\x18\x03 \x03(\x0b\x32%.protobuf_unittest.NestedTestAllTypes\"4\n\x14TestDeprecatedFields\x12\x1c\n\x10\x64\x65precated_int32\x18\x01 \x01(\x05\x42\x02\x18\x01\"\x1b\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\"0\n\x12TestReservedFieldsJ\x04\x08\x02\x10\x03J\x04\x08\x0f\x10\x10J\x04\x08\t\x10\x0cR\x03\x62\x61rR\x03\x62\x61z\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"$\n\x17OptionalGroup_extension\x12\t\n\x01\x61\x18\x11 \x01(\x05\"$\n\x17RepeatedGroup_extension\x12\t\n\x01\x61\x18/ \x01(\x05\"\x98\x01\n\x13TestNestedExtension29\n\x04test\x12$.protobuf_unittest.TestAllExtensions\x18\xea\x07 \x01(\t:\x04test2F\n\x17nested_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18\xeb\x07 \x01(\t\"\xd5\x05\n\x0cTestRequired\x12\t\n\x01\x61\x18\x01 \x02(\x05\x12\x0e\n\x06\x64ummy2\x18\x02 \x01(\x05\x12\t\n\x01\x62\x18\x03 \x02(\x05\x12\x0e\n\x06\x64ummy4\x18\x04 \x01(\x05\x12\x0e\n\x06\x64ummy5\x18\x05 \x01(\x05\x12\x0e\n\x06\x64ummy6\x18\x06 \x01(\x05\x12\x0e\n\x06\x64ummy7\x18\x07 \x01(\x05\x12\x0e\n\x06\x64ummy8\x18\x08 \x01(\x05\x12\x0e\n\x06\x64ummy9\x18\t \x01(\x05\x12\x0f\n\x07\x64ummy10\x18\n \x01(\x05\x12\x0f\n\x07\x64ummy11\x18\x0b \x01(\x05\x12\x0f\n\x07\x64ummy12\x18\x0c \x01(\x05\x12\x0f\n\x07\x64ummy13\x18\r \x01(\x05\x12\x0f\n\x07\x64ummy14\x18\x0e \x01(\x05\x12\x0f\n\x07\x64ummy15\x18\x0f \x01(\x05\x12\x0f\n\x07\x64ummy16\x18\x10 \x01(\x05\x12\x0f\n\x07\x64ummy17\x18\x11 \x01(\x05\x12\x0f\n\x07\x64ummy18\x18\x12 \x01(\x05\x12\x0f\n\x07\x64ummy19\x18\x13 \x01(\x05\x12\x0f\n\x07\x64ummy20\x18\x14 
\x01(\x05\x12\x0f\n\x07\x64ummy21\x18\x15 \x01(\x05\x12\x0f\n\x07\x64ummy22\x18\x16 \x01(\x05\x12\x0f\n\x07\x64ummy23\x18\x17 \x01(\x05\x12\x0f\n\x07\x64ummy24\x18\x18 \x01(\x05\x12\x0f\n\x07\x64ummy25\x18\x19 \x01(\x05\x12\x0f\n\x07\x64ummy26\x18\x1a \x01(\x05\x12\x0f\n\x07\x64ummy27\x18\x1b \x01(\x05\x12\x0f\n\x07\x64ummy28\x18\x1c \x01(\x05\x12\x0f\n\x07\x64ummy29\x18\x1d \x01(\x05\x12\x0f\n\x07\x64ummy30\x18\x1e \x01(\x05\x12\x0f\n\x07\x64ummy31\x18\x1f \x01(\x05\x12\x0f\n\x07\x64ummy32\x18 \x01(\x05\x12\t\n\x01\x63\x18! \x02(\x05\x32V\n\x06single\x12$.protobuf_unittest.TestAllExtensions\x18\xe8\x07 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired2U\n\x05multi\x12$.protobuf_unittest.TestAllExtensions\x18\xe9\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestRequired\"\x9a\x01\n\x13TestRequiredForeign\x12\x39\n\x10optional_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestRequired\x12\x39\n\x10repeated_message\x18\x02 \x03(\x0b\x32\x1f.protobuf_unittest.TestRequired\x12\r\n\x05\x64ummy\x18\x03 \x01(\x05\"Z\n\x11TestForeignNested\x12\x45\n\x0e\x66oreign_nested\x18\x01 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage\"\x12\n\x10TestEmptyMessage\"*\n\x1eTestEmptyMessageWithExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"7\n\x1bTestMultipleExtensionRanges*\x04\x08*\x10+*\x06\x08\xaf \x10\x94!*\n\x08\x80\x80\x04\x10\x80\x80\x80\x80\x02\"4\n\x18TestReallyLargeTagNumber\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x02\x62\x62\x18\xff\xff\xff\x7f \x01(\x05\"U\n\x14TestRecursiveMessage\x12\x32\n\x01\x61\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestRecursiveMessage\x12\t\n\x01i\x18\x02 \x01(\x05\"K\n\x14TestMutualRecursionA\x12\x33\n\x02\x62\x62\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestMutualRecursionB\"b\n\x14TestMutualRecursionB\x12\x32\n\x01\x61\x18\x01 \x01(\x0b\x32\'.protobuf_unittest.TestMutualRecursionA\x12\x16\n\x0eoptional_int32\x18\x02 \x01(\x05\"\xb3\x01\n\x12TestDupFieldNumber\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\x36\n\x03\x66oo\x18\x02 
\x01(\n2).protobuf_unittest.TestDupFieldNumber.Foo\x12\x36\n\x03\x62\x61r\x18\x03 \x01(\n2).protobuf_unittest.TestDupFieldNumber.Bar\x1a\x10\n\x03\x46oo\x12\t\n\x01\x61\x18\x01 \x01(\x05\x1a\x10\n\x03\x42\x61r\x12\t\n\x01\x61\x18\x01 \x01(\x05\"L\n\x10TestEagerMessage\x12\x38\n\x0bsub_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesB\x02(\x00\"K\n\x0fTestLazyMessage\x12\x38\n\x0bsub_message\x18\x01 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesB\x02(\x01\"\x80\x02\n\x18TestNestedMessageHasBits\x12Z\n\x17optional_nested_message\x18\x01 \x01(\x0b\x32\x39.protobuf_unittest.TestNestedMessageHasBits.NestedMessage\x1a\x87\x01\n\rNestedMessage\x12$\n\x1cnestedmessage_repeated_int32\x18\x01 \x03(\x05\x12P\n%nestedmessage_repeated_foreignmessage\x18\x02 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\"\xe5\x03\n\x17TestCamelCaseFieldNames\x12\x16\n\x0ePrimitiveField\x18\x01 \x01(\x05\x12\x13\n\x0bStringField\x18\x02 \x01(\t\x12\x31\n\tEnumField\x18\x03 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12\x37\n\x0cMessageField\x18\x04 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12\x1c\n\x10StringPieceField\x18\x05 \x01(\tB\x02\x08\x02\x12\x15\n\tCordField\x18\x06 \x01(\tB\x02\x08\x01\x12\x1e\n\x16RepeatedPrimitiveField\x18\x07 \x03(\x05\x12\x1b\n\x13RepeatedStringField\x18\x08 \x03(\t\x12\x39\n\x11RepeatedEnumField\x18\t \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12?\n\x14RepeatedMessageField\x18\n \x03(\x0b\x32!.protobuf_unittest.ForeignMessage\x12$\n\x18RepeatedStringPieceField\x18\x0b \x03(\tB\x02\x08\x02\x12\x1d\n\x11RepeatedCordField\x18\x0c \x03(\tB\x02\x08\x01\"\xd5\x01\n\x12TestFieldOrderings\x12\x11\n\tmy_string\x18\x0b \x01(\t\x12\x0e\n\x06my_int\x18\x01 \x01(\x03\x12\x10\n\x08my_float\x18\x65 \x01(\x02\x12U\n\x17optional_nested_message\x18\xc8\x01 \x01(\x0b\x32\x33.protobuf_unittest.TestFieldOrderings.NestedMessage\x1a\'\n\rNestedMessage\x12\n\n\x02oo\x18\x02 \x01(\x03\x12\n\n\x02\x62\x62\x18\x01 
\x01(\x05*\x04\x08\x02\x10\x0b*\x04\x08\x0c\x10\x65\"\xb6\x07\n\x18TestExtremeDefaultValues\x12?\n\rescaped_bytes\x18\x01 \x01(\x0c:(\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\\"\\376\x12 \n\x0clarge_uint32\x18\x02 \x01(\r:\n4294967295\x12*\n\x0clarge_uint64\x18\x03 \x01(\x04:\x14\x31\x38\x34\x34\x36\x37\x34\x34\x30\x37\x33\x37\x30\x39\x35\x35\x31\x36\x31\x35\x12 \n\x0bsmall_int32\x18\x04 \x01(\x05:\x0b-2147483647\x12)\n\x0bsmall_int64\x18\x05 \x01(\x03:\x14-9223372036854775807\x12\'\n\x12really_small_int32\x18\x15 \x01(\x05:\x0b-2147483648\x12\x30\n\x12really_small_int64\x18\x16 \x01(\x03:\x14-9223372036854775808\x12\x18\n\x0butf8_string\x18\x06 \x01(\t:\x03\xe1\x88\xb4\x12\x15\n\nzero_float\x18\x07 \x01(\x02:\x01\x30\x12\x14\n\tone_float\x18\x08 \x01(\x02:\x01\x31\x12\x18\n\x0bsmall_float\x18\t \x01(\x02:\x03\x31.5\x12\x1e\n\x12negative_one_float\x18\n \x01(\x02:\x02-1\x12\x1c\n\x0enegative_float\x18\x0b \x01(\x02:\x04-1.5\x12\x1a\n\x0blarge_float\x18\x0c \x01(\x02:\x05\x32\x65+08\x12$\n\x14small_negative_float\x18\r \x01(\x02:\x06-8e-28\x12\x17\n\ninf_double\x18\x0e \x01(\x01:\x03inf\x12\x1c\n\x0eneg_inf_double\x18\x0f \x01(\x01:\x04-inf\x12\x17\n\nnan_double\x18\x10 \x01(\x01:\x03nan\x12\x16\n\tinf_float\x18\x11 \x01(\x02:\x03inf\x12\x1b\n\rneg_inf_float\x18\x12 \x01(\x02:\x04-inf\x12\x16\n\tnan_float\x18\x13 \x01(\x02:\x03nan\x12+\n\x0c\x63pp_trigraph\x18\x14 \x01(\t:\x15? ? ?? ?? ??? 
??/ ??-\x12 \n\x10string_with_zero\x18\x17 \x01(\t:\x06hel\x00lo\x12\"\n\x0f\x62ytes_with_zero\x18\x18 \x01(\x0c:\twor\\000ld\x12(\n\x16string_piece_with_zero\x18\x19 \x01(\t:\x04\x61\x62\x00\x63\x42\x02\x08\x02\x12 \n\x0e\x63ord_with_zero\x18\x1a \x01(\t:\x04\x31\x32\x00\x33\x42\x02\x08\x01\x12&\n\x12replacement_string\x18\x1b \x01(\t:\n${unknown}\"K\n\x11SparseEnumMessage\x12\x36\n\x0bsparse_enum\x18\x01 \x01(\x0e\x32!.protobuf_unittest.TestSparseEnum\"\x19\n\tOneString\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\t\"\x1a\n\nMoreString\x12\x0c\n\x04\x64\x61ta\x18\x01 \x03(\t\"\x18\n\x08OneBytes\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"\x19\n\tMoreBytes\x12\x0c\n\x04\x64\x61ta\x18\x01 \x03(\x0c\"\x1c\n\x0cInt32Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x05\"\x1d\n\rUint32Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\r\"\x1c\n\x0cInt64Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x03\"\x1d\n\rUint64Message\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x04\"\x1b\n\x0b\x42oolMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x08\"\xd0\x01\n\tTestOneof\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12\x36\n\x0b\x66oo_message\x18\x03 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypesH\x00\x12\x39\n\x08\x66oogroup\x18\x04 \x01(\n2%.protobuf_unittest.TestOneof.FooGroupH\x00\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\t\n\x01\x62\x18\x06 \x01(\tB\x05\n\x03\x66oo\"\xe7\x01\n\x1cTestOneofBackwardsCompatible\x12\x0f\n\x07\x66oo_int\x18\x01 \x01(\x05\x12\x12\n\nfoo_string\x18\x02 \x01(\t\x12\x34\n\x0b\x66oo_message\x18\x03 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12J\n\x08\x66oogroup\x18\x04 \x01(\n28.protobuf_unittest.TestOneofBackwardsCompatible.FooGroup\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\t\n\x01\x62\x18\x06 \x01(\t\"\x9e\x06\n\nTestOneof2\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12\x16\n\x08\x66oo_cord\x18\x03 
\x01(\tB\x02\x08\x01H\x00\x12\x1e\n\x10\x66oo_string_piece\x18\x04 \x01(\tB\x02\x08\x02H\x00\x12\x13\n\tfoo_bytes\x18\x05 \x01(\x0cH\x00\x12<\n\x08\x66oo_enum\x18\x06 \x01(\x0e\x32(.protobuf_unittest.TestOneof2.NestedEnumH\x00\x12\x42\n\x0b\x66oo_message\x18\x07 \x01(\x0b\x32+.protobuf_unittest.TestOneof2.NestedMessageH\x00\x12:\n\x08\x66oogroup\x18\x08 \x01(\n2&.protobuf_unittest.TestOneof2.FooGroupH\x00\x12K\n\x10\x66oo_lazy_message\x18\x0b \x01(\x0b\x32+.protobuf_unittest.TestOneof2.NestedMessageB\x02(\x01H\x00\x12\x14\n\x07\x62\x61r_int\x18\x0c \x01(\x05:\x01\x35H\x01\x12\x1c\n\nbar_string\x18\r \x01(\t:\x06STRINGH\x01\x12\x1c\n\x08\x62\x61r_cord\x18\x0e \x01(\t:\x04\x43ORDB\x02\x08\x01H\x01\x12&\n\x10\x62\x61r_string_piece\x18\x0f \x01(\t:\x06SPIECEB\x02\x08\x02H\x01\x12\x1a\n\tbar_bytes\x18\x10 \x01(\x0c:\x05\x42YTESH\x01\x12\x41\n\x08\x62\x61r_enum\x18\x11 \x01(\x0e\x32(.protobuf_unittest.TestOneof2.NestedEnum:\x03\x42\x41RH\x01\x12\x0f\n\x07\x62\x61z_int\x18\x12 \x01(\x05\x12\x17\n\nbaz_string\x18\x13 \x01(\t:\x03\x42\x41Z\x1a \n\x08\x46ooGroup\x12\t\n\x01\x61\x18\t \x01(\x05\x12\t\n\x01\x62\x18\n \x01(\t\x1a\x33\n\rNestedMessage\x12\x0f\n\x07qux_int\x18\x01 \x01(\x03\x12\x11\n\tcorge_int\x18\x02 \x03(\x05\"\'\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x42\x05\n\x03\x66ooB\x05\n\x03\x62\x61r\"\xb8\x01\n\x11TestRequiredOneof\x12\x11\n\x07\x66oo_int\x18\x01 \x01(\x05H\x00\x12\x14\n\nfoo_string\x18\x02 \x01(\tH\x00\x12I\n\x0b\x66oo_message\x18\x03 \x01(\x0b\x32\x32.protobuf_unittest.TestRequiredOneof.NestedMessageH\x00\x1a(\n\rNestedMessage\x12\x17\n\x0frequired_double\x18\x01 \x02(\x01\x42\x05\n\x03\x66oo\"\xaa\x03\n\x0fTestPackedTypes\x12\x18\n\x0cpacked_int32\x18Z \x03(\x05\x42\x02\x10\x01\x12\x18\n\x0cpacked_int64\x18[ \x03(\x03\x42\x02\x10\x01\x12\x19\n\rpacked_uint32\x18\\ \x03(\rB\x02\x10\x01\x12\x19\n\rpacked_uint64\x18] \x03(\x04\x42\x02\x10\x01\x12\x19\n\rpacked_sint32\x18^ 
\x03(\x11\x42\x02\x10\x01\x12\x19\n\rpacked_sint64\x18_ \x03(\x12\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed32\x18` \x03(\x07\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed64\x18\x61 \x03(\x06\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed32\x18\x62 \x03(\x0f\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed64\x18\x63 \x03(\x10\x42\x02\x10\x01\x12\x18\n\x0cpacked_float\x18\x64 \x03(\x02\x42\x02\x10\x01\x12\x19\n\rpacked_double\x18\x65 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0bpacked_bool\x18\x66 \x03(\x08\x42\x02\x10\x01\x12\x37\n\x0bpacked_enum\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x01\"\xc8\x03\n\x11TestUnpackedTypes\x12\x1a\n\x0eunpacked_int32\x18Z \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_int64\x18[ \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0funpacked_uint32\x18\\ \x03(\rB\x02\x10\x00\x12\x1b\n\x0funpacked_uint64\x18] \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint32\x18^ \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0funpacked_sint64\x18_ \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed32\x18` \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10unpacked_fixed64\x18\x61 \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed32\x18\x62 \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11unpacked_sfixed64\x18\x63 \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0eunpacked_float\x18\x64 \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0funpacked_double\x18\x65 \x03(\x01\x42\x02\x10\x00\x12\x19\n\runpacked_bool\x18\x66 \x03(\x08\x42\x02\x10\x00\x12\x39\n\runpacked_enum\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x00\" \n\x14TestPackedExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"\"\n\x16TestUnpackedExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02\"\x99\x04\n\x15TestDynamicExtensions\x12\x19\n\x10scalar_extension\x18\xd0\x0f \x01(\x07\x12\x37\n\x0e\x65num_extension\x18\xd1\x0f \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum\x12Y\n\x16\x64ynamic_enum_extension\x18\xd2\x0f \x01(\x0e\x32\x38.protobuf_unittest.TestDynamicExtensions.DynamicEnumType\x12=\n\x11message_extension\x18\xd3\x0f 
\x01(\x0b\x32!.protobuf_unittest.ForeignMessage\x12_\n\x19\x64ynamic_message_extension\x18\xd4\x0f \x01(\x0b\x32;.protobuf_unittest.TestDynamicExtensions.DynamicMessageType\x12\x1b\n\x12repeated_extension\x18\xd5\x0f \x03(\t\x12\x1d\n\x10packed_extension\x18\xd6\x0f \x03(\x11\x42\x02\x10\x01\x1a,\n\x12\x44ynamicMessageType\x12\x16\n\rdynamic_field\x18\xb4\x10 \x01(\x05\"G\n\x0f\x44ynamicEnumType\x12\x10\n\x0b\x44YNAMIC_FOO\x10\x98\x11\x12\x10\n\x0b\x44YNAMIC_BAR\x10\x99\x11\x12\x10\n\x0b\x44YNAMIC_BAZ\x10\x9a\x11\"\xc0\x01\n#TestRepeatedScalarDifferentTagSizes\x12\x18\n\x10repeated_fixed32\x18\x0c \x03(\x07\x12\x16\n\x0erepeated_int32\x18\r \x03(\x05\x12\x19\n\x10repeated_fixed64\x18\xfe\x0f \x03(\x06\x12\x17\n\x0erepeated_int64\x18\xff\x0f \x03(\x03\x12\x18\n\x0erepeated_float\x18\xfe\xff\x0f \x03(\x02\x12\x19\n\x0frepeated_uint64\x18\xff\xff\x0f \x03(\x04\"\xf7\t\n\x10TestParsingMerge\x12;\n\x12required_all_types\x18\x01 \x02(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12;\n\x12optional_all_types\x18\x02 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12;\n\x12repeated_all_types\x18\x03 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12H\n\roptionalgroup\x18\n \x01(\n21.protobuf_unittest.TestParsingMerge.OptionalGroup\x12H\n\rrepeatedgroup\x18\x14 \x03(\n21.protobuf_unittest.TestParsingMerge.RepeatedGroup\x1a\xaa\x04\n\x17RepeatedFieldsGenerator\x12/\n\x06\x66ield1\x18\x01 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12/\n\x06\x66ield2\x18\x02 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12/\n\x06\x66ield3\x18\x03 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12R\n\x06group1\x18\n \x03(\n2B.protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1\x12R\n\x06group2\x18\x14 \x03(\n2B.protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2\x12.\n\x04\x65xt1\x18\xe8\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x12.\n\x04\x65xt2\x18\xe9\x07 
\x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1a\x39\n\x06Group1\x12/\n\x06\x66ield1\x18\x0b \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1a\x39\n\x06Group2\x12/\n\x06\x66ield1\x18\x15 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1aR\n\rOptionalGroup\x12\x41\n\x18optional_group_all_types\x18\x0b \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\x1aR\n\rRepeatedGroup\x12\x41\n\x18repeated_group_all_types\x18\x15 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\x32[\n\x0coptional_ext\x12#.protobuf_unittest.TestParsingMerge\x18\xe8\x07 \x01(\x0b\x32\x1f.protobuf_unittest.TestAllTypes2[\n\x0crepeated_ext\x12#.protobuf_unittest.TestParsingMerge\x18\xe9\x07 \x03(\x0b\x32\x1f.protobuf_unittest.TestAllTypes\"D\n\x1bTestCommentInjectionMessage\x12%\n\x01\x61\x18\x01 \x01(\t:\x1a*/ <- Neither should this.\"\x0c\n\nFooRequest\"\r\n\x0b\x46ooResponse\"\x12\n\x10\x46ooClientMessage\"\x12\n\x10\x46ooServerMessage\"\x0c\n\nBarRequest\"\r\n\x0b\x42\x61rResponse*@\n\x0b\x46oreignEnum\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x04\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x05\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x06*K\n\x14TestEnumWithDupValue\x12\x08\n\x04\x46OO1\x10\x01\x12\x08\n\x04\x42\x41R1\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x08\n\x04\x46OO2\x10\x01\x12\x08\n\x04\x42\x41R2\x10\x02\x1a\x02\x10\x01*\x89\x01\n\x0eTestSparseEnum\x12\x0c\n\x08SPARSE_A\x10{\x12\x0e\n\x08SPARSE_B\x10\xa6\xe7\x03\x12\x0f\n\x08SPARSE_C\x10\xb2\xb1\x80\x06\x12\x15\n\x08SPARSE_D\x10\xf1\xff\xff\xff\xff\xff\xff\xff\xff\x01\x12\x15\n\x08SPARSE_E\x10\xb4\xde\xfc\xff\xff\xff\xff\xff\xff\x01\x12\x0c\n\x08SPARSE_F\x10\x00\x12\x0c\n\x08SPARSE_G\x10\x02\x32\x99\x01\n\x0bTestService\x12\x44\n\x03\x46oo\x12\x1d.protobuf_unittest.FooRequest\x1a\x1e.protobuf_unittest.FooResponse\x12\x44\n\x03\x42\x61r\x12\x1d.protobuf_unittest.BarRequest\x1a\x1e.protobuf_unittest.BarResponse:F\n\x18optional_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x01 
\x01(\x05:F\n\x18optional_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x02 \x01(\x03:G\n\x19optional_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x03 \x01(\r:G\n\x19optional_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x04 \x01(\x04:G\n\x19optional_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x05 \x01(\x11:G\n\x19optional_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x06 \x01(\x12:H\n\x1aoptional_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x07 \x01(\x07:H\n\x1aoptional_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x08 \x01(\x06:I\n\x1boptional_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\t \x01(\x0f:I\n\x1boptional_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\n \x01(\x10:F\n\x18optional_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0b \x01(\x02:G\n\x19optional_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0c \x01(\x01:E\n\x17optional_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18\r \x01(\x08:G\n\x19optional_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0e \x01(\t:F\n\x18optional_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x0f \x01(\x0c:q\n\x17optionalgroup_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x10 \x01(\n2*.protobuf_unittest.OptionalGroup_extension:~\n!optional_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x12 \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:s\n\"optional_foreign_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x13 \x01(\x0b\x32!.protobuf_unittest.ForeignMessage:x\n!optional_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage:x\n\x1eoptional_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x15 
\x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:m\n\x1foptional_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x16 \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:r\n\x1eoptional_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x17 \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:Q\n\x1foptional_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x18 \x01(\tB\x02\x08\x02:I\n\x17optional_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x19 \x01(\tB\x02\x08\x01:\x85\x01\n(optional_public_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage:\x80\x01\n\x1foptional_lazy_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1b \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01:F\n\x18repeated_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x1f \x03(\x05:F\n\x18repeated_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18 \x03(\x03:G\n\x19repeated_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18! 
\x03(\r:G\n\x19repeated_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\" \x03(\x04:G\n\x19repeated_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18# \x03(\x11:G\n\x19repeated_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18$ \x03(\x12:H\n\x1arepeated_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18% \x03(\x07:H\n\x1arepeated_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18& \x03(\x06:I\n\x1brepeated_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\' \x03(\x0f:I\n\x1brepeated_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18( \x03(\x10:F\n\x18repeated_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18) \x03(\x02:G\n\x19repeated_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18* \x03(\x01:E\n\x17repeated_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18+ \x03(\x08:G\n\x19repeated_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18, \x03(\t:F\n\x18repeated_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18- \x03(\x0c:q\n\x17repeatedgroup_extension\x12$.protobuf_unittest.TestAllExtensions\x18. 
\x03(\n2*.protobuf_unittest.RepeatedGroup_extension:~\n!repeated_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x30 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:s\n\"repeated_foreign_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x31 \x03(\x0b\x32!.protobuf_unittest.ForeignMessage:x\n!repeated_import_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage:x\n\x1erepeated_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x33 \x03(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:m\n\x1frepeated_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x34 \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:r\n\x1erepeated_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x35 \x03(\x0e\x32$.protobuf_unittest_import.ImportEnum:Q\n\x1frepeated_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x36 \x03(\tB\x02\x08\x02:I\n\x17repeated_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x37 \x03(\tB\x02\x08\x01:\x80\x01\n\x1frepeated_lazy_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x39 \x03(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessageB\x02(\x01:I\n\x17\x64\x65\x66\x61ult_int32_extension\x12$.protobuf_unittest.TestAllExtensions\x18= \x01(\x05:\x02\x34\x31:I\n\x17\x64\x65\x66\x61ult_int64_extension\x12$.protobuf_unittest.TestAllExtensions\x18> \x01(\x03:\x02\x34\x32:J\n\x18\x64\x65\x66\x61ult_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18? 
\x01(\r:\x02\x34\x33:J\n\x18\x64\x65\x66\x61ult_uint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18@ \x01(\x04:\x02\x34\x34:K\n\x18\x64\x65\x66\x61ult_sint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x41 \x01(\x11:\x03-45:J\n\x18\x64\x65\x66\x61ult_sint64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x42 \x01(\x12:\x02\x34\x36:K\n\x19\x64\x65\x66\x61ult_fixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x43 \x01(\x07:\x02\x34\x37:K\n\x19\x64\x65\x66\x61ult_fixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x44 \x01(\x06:\x02\x34\x38:L\n\x1a\x64\x65\x66\x61ult_sfixed32_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x45 \x01(\x0f:\x02\x34\x39:M\n\x1a\x64\x65\x66\x61ult_sfixed64_extension\x12$.protobuf_unittest.TestAllExtensions\x18\x46 \x01(\x10:\x03-50:K\n\x17\x64\x65\x66\x61ult_float_extension\x12$.protobuf_unittest.TestAllExtensions\x18G \x01(\x02:\x04\x35\x31.5:M\n\x18\x64\x65\x66\x61ult_double_extension\x12$.protobuf_unittest.TestAllExtensions\x18H \x01(\x01:\x05\x35\x32\x30\x30\x30:J\n\x16\x64\x65\x66\x61ult_bool_extension\x12$.protobuf_unittest.TestAllExtensions\x18I \x01(\x08:\x04true:M\n\x18\x64\x65\x66\x61ult_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18J \x01(\t:\x05hello:L\n\x17\x64\x65\x66\x61ult_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18K \x01(\x0c:\x05world:|\n\x1d\x64\x65\x66\x61ult_nested_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18Q \x01(\x0e\x32*.protobuf_unittest.TestAllTypes.NestedEnum:\x03\x42\x41R:y\n\x1e\x64\x65\x66\x61ult_foreign_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18R \x01(\x0e\x32\x1e.protobuf_unittest.ForeignEnum:\x0b\x46OREIGN_BAR:}\n\x1d\x64\x65\x66\x61ult_import_enum_extension\x12$.protobuf_unittest.TestAllExtensions\x18S \x01(\x0e\x32$.protobuf_unittest_import.ImportEnum:\nIMPORT_BAR:U\n\x1e\x64\x65\x66\x61ult_string_piece_extension\x12$.protobuf_unittest.TestAllExtensions\x18T 
\x01(\t:\x03\x61\x62\x63\x42\x02\x08\x02:M\n\x16\x64\x65\x66\x61ult_cord_extension\x12$.protobuf_unittest.TestAllExtensions\x18U \x01(\t:\x03\x31\x32\x33\x42\x02\x08\x01:D\n\x16oneof_uint32_extension\x12$.protobuf_unittest.TestAllExtensions\x18o \x01(\r:{\n\x1eoneof_nested_message_extension\x12$.protobuf_unittest.TestAllExtensions\x18p \x01(\x0b\x32-.protobuf_unittest.TestAllTypes.NestedMessage:D\n\x16oneof_string_extension\x12$.protobuf_unittest.TestAllExtensions\x18q \x01(\t:C\n\x15oneof_bytes_extension\x12$.protobuf_unittest.TestAllExtensions\x18r \x01(\x0c:B\n\x13my_extension_string\x12%.protobuf_unittest.TestFieldOrderings\x18\x32 \x01(\t:?\n\x10my_extension_int\x12%.protobuf_unittest.TestFieldOrderings\x18\x05 \x01(\x05:K\n\x16packed_int32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18Z \x03(\x05\x42\x02\x10\x01:K\n\x16packed_int64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18[ \x03(\x03\x42\x02\x10\x01:L\n\x17packed_uint32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\\ \x03(\rB\x02\x10\x01:L\n\x17packed_uint64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18] \x03(\x04\x42\x02\x10\x01:L\n\x17packed_sint32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18^ \x03(\x11\x42\x02\x10\x01:L\n\x17packed_sint64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18_ \x03(\x12\x42\x02\x10\x01:M\n\x18packed_fixed32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18` \x03(\x07\x42\x02\x10\x01:M\n\x18packed_fixed64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x61 \x03(\x06\x42\x02\x10\x01:N\n\x19packed_sfixed32_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x62 \x03(\x0f\x42\x02\x10\x01:N\n\x19packed_sfixed64_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x63 \x03(\x10\x42\x02\x10\x01:K\n\x16packed_float_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x64 
\x03(\x02\x42\x02\x10\x01:L\n\x17packed_double_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x65 \x03(\x01\x42\x02\x10\x01:J\n\x15packed_bool_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18\x66 \x03(\x08\x42\x02\x10\x01:j\n\x15packed_enum_extension\x12\'.protobuf_unittest.TestPackedExtensions\x18g \x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x01:O\n\x18unpacked_int32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18Z \x03(\x05\x42\x02\x10\x00:O\n\x18unpacked_int64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18[ \x03(\x03\x42\x02\x10\x00:P\n\x19unpacked_uint32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\\ \x03(\rB\x02\x10\x00:P\n\x19unpacked_uint64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18] \x03(\x04\x42\x02\x10\x00:P\n\x19unpacked_sint32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18^ \x03(\x11\x42\x02\x10\x00:P\n\x19unpacked_sint64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18_ \x03(\x12\x42\x02\x10\x00:Q\n\x1aunpacked_fixed32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18` \x03(\x07\x42\x02\x10\x00:Q\n\x1aunpacked_fixed64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x61 \x03(\x06\x42\x02\x10\x00:R\n\x1bunpacked_sfixed32_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x62 \x03(\x0f\x42\x02\x10\x00:R\n\x1bunpacked_sfixed64_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x63 \x03(\x10\x42\x02\x10\x00:O\n\x18unpacked_float_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x64 \x03(\x02\x42\x02\x10\x00:P\n\x19unpacked_double_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x65 \x03(\x01\x42\x02\x10\x00:N\n\x17unpacked_bool_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18\x66 \x03(\x08\x42\x02\x10\x00:n\n\x17unpacked_enum_extension\x12).protobuf_unittest.TestUnpackedExtensions\x18g 
\x03(\x0e\x32\x1e.protobuf_unittest.ForeignEnumB\x02\x10\x00\x42\x1d\x42\rUnittestProtoH\x01\x80\x01\x01\x88\x01\x01\x90\x01\x01\xf8\x01\x01') + , + dependencies=[google_dot_protobuf_dot_unittest__import__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_FOREIGNENUM = _descriptor.EnumDescriptor( + name='ForeignEnum', + full_name='protobuf_unittest.ForeignEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOREIGN_FOO', index=0, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN_BAR', index=1, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN_BAZ', index=2, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=12434, + serialized_end=12498, +) +_sym_db.RegisterEnumDescriptor(_FOREIGNENUM) + +ForeignEnum = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUM) +_TESTENUMWITHDUPVALUE = _descriptor.EnumDescriptor( + name='TestEnumWithDupValue', + full_name='protobuf_unittest.TestEnumWithDupValue', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOO1', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAR1', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAZ', index=2, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOO2', index=3, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAR2', index=4, number=2, + options=None, + type=None), + ], + containing_type=None, + options=_descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\020\001')), + serialized_start=12500, + serialized_end=12575, +) +_sym_db.RegisterEnumDescriptor(_TESTENUMWITHDUPVALUE) + +TestEnumWithDupValue = enum_type_wrapper.EnumTypeWrapper(_TESTENUMWITHDUPVALUE) +_TESTSPARSEENUM = _descriptor.EnumDescriptor( + 
name='TestSparseEnum', + full_name='protobuf_unittest.TestSparseEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SPARSE_A', index=0, number=123, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPARSE_B', index=1, number=62374, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPARSE_C', index=2, number=12589234, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPARSE_D', index=3, number=-15, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPARSE_E', index=4, number=-53452, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPARSE_F', index=5, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPARSE_G', index=6, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=12578, + serialized_end=12715, +) +_sym_db.RegisterEnumDescriptor(_TESTSPARSEENUM) + +TestSparseEnum = enum_type_wrapper.EnumTypeWrapper(_TESTSPARSEENUM) +FOREIGN_FOO = 4 +FOREIGN_BAR = 5 +FOREIGN_BAZ = 6 +FOO1 = 1 +BAR1 = 2 +BAZ = 3 +FOO2 = 1 +BAR2 = 2 +SPARSE_A = 123 +SPARSE_B = 62374 +SPARSE_C = 12589234 +SPARSE_D = -15 +SPARSE_E = -53452 +SPARSE_F = 0 +SPARSE_G = 2 + +OPTIONAL_INT32_EXTENSION_FIELD_NUMBER = 1 +optional_int32_extension = _descriptor.FieldDescriptor( + name='optional_int32_extension', full_name='protobuf_unittest.optional_int32_extension', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_INT64_EXTENSION_FIELD_NUMBER = 2 +optional_int64_extension = _descriptor.FieldDescriptor( + name='optional_int64_extension', full_name='protobuf_unittest.optional_int64_extension', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_UINT32_EXTENSION_FIELD_NUMBER = 3 +optional_uint32_extension = _descriptor.FieldDescriptor( + name='optional_uint32_extension', full_name='protobuf_unittest.optional_uint32_extension', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_UINT64_EXTENSION_FIELD_NUMBER = 4 +optional_uint64_extension = _descriptor.FieldDescriptor( + name='optional_uint64_extension', full_name='protobuf_unittest.optional_uint64_extension', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_SINT32_EXTENSION_FIELD_NUMBER = 5 +optional_sint32_extension = _descriptor.FieldDescriptor( + name='optional_sint32_extension', full_name='protobuf_unittest.optional_sint32_extension', index=4, + number=5, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_SINT64_EXTENSION_FIELD_NUMBER = 6 +optional_sint64_extension = _descriptor.FieldDescriptor( + name='optional_sint64_extension', full_name='protobuf_unittest.optional_sint64_extension', index=5, + number=6, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_FIXED32_EXTENSION_FIELD_NUMBER = 7 +optional_fixed32_extension = _descriptor.FieldDescriptor( + name='optional_fixed32_extension', full_name='protobuf_unittest.optional_fixed32_extension', index=6, + number=7, type=7, cpp_type=3, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_FIXED64_EXTENSION_FIELD_NUMBER = 8 +optional_fixed64_extension = _descriptor.FieldDescriptor( + name='optional_fixed64_extension', full_name='protobuf_unittest.optional_fixed64_extension', index=7, + number=8, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_SFIXED32_EXTENSION_FIELD_NUMBER = 9 +optional_sfixed32_extension = _descriptor.FieldDescriptor( + name='optional_sfixed32_extension', full_name='protobuf_unittest.optional_sfixed32_extension', index=8, + number=9, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_SFIXED64_EXTENSION_FIELD_NUMBER = 10 +optional_sfixed64_extension = _descriptor.FieldDescriptor( + name='optional_sfixed64_extension', full_name='protobuf_unittest.optional_sfixed64_extension', index=9, + number=10, type=16, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_FLOAT_EXTENSION_FIELD_NUMBER = 11 +optional_float_extension = _descriptor.FieldDescriptor( + name='optional_float_extension', full_name='protobuf_unittest.optional_float_extension', index=10, + number=11, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_DOUBLE_EXTENSION_FIELD_NUMBER = 12 +optional_double_extension = _descriptor.FieldDescriptor( + name='optional_double_extension', 
full_name='protobuf_unittest.optional_double_extension', index=11, + number=12, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_BOOL_EXTENSION_FIELD_NUMBER = 13 +optional_bool_extension = _descriptor.FieldDescriptor( + name='optional_bool_extension', full_name='protobuf_unittest.optional_bool_extension', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_STRING_EXTENSION_FIELD_NUMBER = 14 +optional_string_extension = _descriptor.FieldDescriptor( + name='optional_string_extension', full_name='protobuf_unittest.optional_string_extension', index=13, + number=14, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_BYTES_EXTENSION_FIELD_NUMBER = 15 +optional_bytes_extension = _descriptor.FieldDescriptor( + name='optional_bytes_extension', full_name='protobuf_unittest.optional_bytes_extension', index=14, + number=15, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONALGROUP_EXTENSION_FIELD_NUMBER = 16 +optionalgroup_extension = _descriptor.FieldDescriptor( + name='optionalgroup_extension', full_name='protobuf_unittest.optionalgroup_extension', index=15, + number=16, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 18 
+optional_nested_message_extension = _descriptor.FieldDescriptor( + name='optional_nested_message_extension', full_name='protobuf_unittest.optional_nested_message_extension', index=16, + number=18, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_FOREIGN_MESSAGE_EXTENSION_FIELD_NUMBER = 19 +optional_foreign_message_extension = _descriptor.FieldDescriptor( + name='optional_foreign_message_extension', full_name='protobuf_unittest.optional_foreign_message_extension', index=17, + number=19, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 20 +optional_import_message_extension = _descriptor.FieldDescriptor( + name='optional_import_message_extension', full_name='protobuf_unittest.optional_import_message_extension', index=18, + number=20, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 21 +optional_nested_enum_extension = _descriptor.FieldDescriptor( + name='optional_nested_enum_extension', full_name='protobuf_unittest.optional_nested_enum_extension', index=19, + number=21, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 22 +optional_foreign_enum_extension = _descriptor.FieldDescriptor( + name='optional_foreign_enum_extension', full_name='protobuf_unittest.optional_foreign_enum_extension', index=20, + number=22, type=14, cpp_type=8, label=1, + 
has_default_value=False, default_value=4, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 23 +optional_import_enum_extension = _descriptor.FieldDescriptor( + name='optional_import_enum_extension', full_name='protobuf_unittest.optional_import_enum_extension', index=21, + number=23, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=7, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_STRING_PIECE_EXTENSION_FIELD_NUMBER = 24 +optional_string_piece_extension = _descriptor.FieldDescriptor( + name='optional_string_piece_extension', full_name='protobuf_unittest.optional_string_piece_extension', index=22, + number=24, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))) +OPTIONAL_CORD_EXTENSION_FIELD_NUMBER = 25 +optional_cord_extension = _descriptor.FieldDescriptor( + name='optional_cord_extension', full_name='protobuf_unittest.optional_cord_extension', index=23, + number=25, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))) +OPTIONAL_PUBLIC_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 26 +optional_public_import_message_extension = _descriptor.FieldDescriptor( + name='optional_public_import_message_extension', full_name='protobuf_unittest.optional_public_import_message_extension', index=24, + number=26, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=True, extension_scope=None, + options=None) +OPTIONAL_LAZY_MESSAGE_EXTENSION_FIELD_NUMBER = 27 +optional_lazy_message_extension = _descriptor.FieldDescriptor( + name='optional_lazy_message_extension', full_name='protobuf_unittest.optional_lazy_message_extension', index=25, + number=27, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))) +REPEATED_INT32_EXTENSION_FIELD_NUMBER = 31 +repeated_int32_extension = _descriptor.FieldDescriptor( + name='repeated_int32_extension', full_name='protobuf_unittest.repeated_int32_extension', index=26, + number=31, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_INT64_EXTENSION_FIELD_NUMBER = 32 +repeated_int64_extension = _descriptor.FieldDescriptor( + name='repeated_int64_extension', full_name='protobuf_unittest.repeated_int64_extension', index=27, + number=32, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_UINT32_EXTENSION_FIELD_NUMBER = 33 +repeated_uint32_extension = _descriptor.FieldDescriptor( + name='repeated_uint32_extension', full_name='protobuf_unittest.repeated_uint32_extension', index=28, + number=33, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_UINT64_EXTENSION_FIELD_NUMBER = 34 +repeated_uint64_extension = _descriptor.FieldDescriptor( + name='repeated_uint64_extension', 
full_name='protobuf_unittest.repeated_uint64_extension', index=29, + number=34, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_SINT32_EXTENSION_FIELD_NUMBER = 35 +repeated_sint32_extension = _descriptor.FieldDescriptor( + name='repeated_sint32_extension', full_name='protobuf_unittest.repeated_sint32_extension', index=30, + number=35, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_SINT64_EXTENSION_FIELD_NUMBER = 36 +repeated_sint64_extension = _descriptor.FieldDescriptor( + name='repeated_sint64_extension', full_name='protobuf_unittest.repeated_sint64_extension', index=31, + number=36, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_FIXED32_EXTENSION_FIELD_NUMBER = 37 +repeated_fixed32_extension = _descriptor.FieldDescriptor( + name='repeated_fixed32_extension', full_name='protobuf_unittest.repeated_fixed32_extension', index=32, + number=37, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_FIXED64_EXTENSION_FIELD_NUMBER = 38 +repeated_fixed64_extension = _descriptor.FieldDescriptor( + name='repeated_fixed64_extension', full_name='protobuf_unittest.repeated_fixed64_extension', index=33, + number=38, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_SFIXED32_EXTENSION_FIELD_NUMBER = 39 
+repeated_sfixed32_extension = _descriptor.FieldDescriptor( + name='repeated_sfixed32_extension', full_name='protobuf_unittest.repeated_sfixed32_extension', index=34, + number=39, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_SFIXED64_EXTENSION_FIELD_NUMBER = 40 +repeated_sfixed64_extension = _descriptor.FieldDescriptor( + name='repeated_sfixed64_extension', full_name='protobuf_unittest.repeated_sfixed64_extension', index=35, + number=40, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_FLOAT_EXTENSION_FIELD_NUMBER = 41 +repeated_float_extension = _descriptor.FieldDescriptor( + name='repeated_float_extension', full_name='protobuf_unittest.repeated_float_extension', index=36, + number=41, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_DOUBLE_EXTENSION_FIELD_NUMBER = 42 +repeated_double_extension = _descriptor.FieldDescriptor( + name='repeated_double_extension', full_name='protobuf_unittest.repeated_double_extension', index=37, + number=42, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_BOOL_EXTENSION_FIELD_NUMBER = 43 +repeated_bool_extension = _descriptor.FieldDescriptor( + name='repeated_bool_extension', full_name='protobuf_unittest.repeated_bool_extension', index=38, + number=43, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + 
options=None) +REPEATED_STRING_EXTENSION_FIELD_NUMBER = 44 +repeated_string_extension = _descriptor.FieldDescriptor( + name='repeated_string_extension', full_name='protobuf_unittest.repeated_string_extension', index=39, + number=44, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_BYTES_EXTENSION_FIELD_NUMBER = 45 +repeated_bytes_extension = _descriptor.FieldDescriptor( + name='repeated_bytes_extension', full_name='protobuf_unittest.repeated_bytes_extension', index=40, + number=45, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATEDGROUP_EXTENSION_FIELD_NUMBER = 46 +repeatedgroup_extension = _descriptor.FieldDescriptor( + name='repeatedgroup_extension', full_name='protobuf_unittest.repeatedgroup_extension', index=41, + number=46, type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 48 +repeated_nested_message_extension = _descriptor.FieldDescriptor( + name='repeated_nested_message_extension', full_name='protobuf_unittest.repeated_nested_message_extension', index=42, + number=48, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_FOREIGN_MESSAGE_EXTENSION_FIELD_NUMBER = 49 +repeated_foreign_message_extension = _descriptor.FieldDescriptor( + name='repeated_foreign_message_extension', full_name='protobuf_unittest.repeated_foreign_message_extension', index=43, + number=49, type=11, cpp_type=10, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_IMPORT_MESSAGE_EXTENSION_FIELD_NUMBER = 50 +repeated_import_message_extension = _descriptor.FieldDescriptor( + name='repeated_import_message_extension', full_name='protobuf_unittest.repeated_import_message_extension', index=44, + number=50, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 51 +repeated_nested_enum_extension = _descriptor.FieldDescriptor( + name='repeated_nested_enum_extension', full_name='protobuf_unittest.repeated_nested_enum_extension', index=45, + number=51, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 52 +repeated_foreign_enum_extension = _descriptor.FieldDescriptor( + name='repeated_foreign_enum_extension', full_name='protobuf_unittest.repeated_foreign_enum_extension', index=46, + number=52, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 53 +repeated_import_enum_extension = _descriptor.FieldDescriptor( + name='repeated_import_enum_extension', full_name='protobuf_unittest.repeated_import_enum_extension', index=47, + number=53, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +REPEATED_STRING_PIECE_EXTENSION_FIELD_NUMBER = 54 +repeated_string_piece_extension = _descriptor.FieldDescriptor( + 
name='repeated_string_piece_extension', full_name='protobuf_unittest.repeated_string_piece_extension', index=48, + number=54, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))) +REPEATED_CORD_EXTENSION_FIELD_NUMBER = 55 +repeated_cord_extension = _descriptor.FieldDescriptor( + name='repeated_cord_extension', full_name='protobuf_unittest.repeated_cord_extension', index=49, + number=55, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))) +REPEATED_LAZY_MESSAGE_EXTENSION_FIELD_NUMBER = 57 +repeated_lazy_message_extension = _descriptor.FieldDescriptor( + name='repeated_lazy_message_extension', full_name='protobuf_unittest.repeated_lazy_message_extension', index=50, + number=57, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))) +DEFAULT_INT32_EXTENSION_FIELD_NUMBER = 61 +default_int32_extension = _descriptor.FieldDescriptor( + name='default_int32_extension', full_name='protobuf_unittest.default_int32_extension', index=51, + number=61, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=41, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_INT64_EXTENSION_FIELD_NUMBER = 62 +default_int64_extension = _descriptor.FieldDescriptor( + name='default_int64_extension', full_name='protobuf_unittest.default_int64_extension', index=52, + number=62, type=3, cpp_type=2, 
label=1, + has_default_value=True, default_value=42, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_UINT32_EXTENSION_FIELD_NUMBER = 63 +default_uint32_extension = _descriptor.FieldDescriptor( + name='default_uint32_extension', full_name='protobuf_unittest.default_uint32_extension', index=53, + number=63, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=43, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_UINT64_EXTENSION_FIELD_NUMBER = 64 +default_uint64_extension = _descriptor.FieldDescriptor( + name='default_uint64_extension', full_name='protobuf_unittest.default_uint64_extension', index=54, + number=64, type=4, cpp_type=4, label=1, + has_default_value=True, default_value=44, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_SINT32_EXTENSION_FIELD_NUMBER = 65 +default_sint32_extension = _descriptor.FieldDescriptor( + name='default_sint32_extension', full_name='protobuf_unittest.default_sint32_extension', index=55, + number=65, type=17, cpp_type=1, label=1, + has_default_value=True, default_value=-45, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_SINT64_EXTENSION_FIELD_NUMBER = 66 +default_sint64_extension = _descriptor.FieldDescriptor( + name='default_sint64_extension', full_name='protobuf_unittest.default_sint64_extension', index=56, + number=66, type=18, cpp_type=2, label=1, + has_default_value=True, default_value=46, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_FIXED32_EXTENSION_FIELD_NUMBER = 67 +default_fixed32_extension = _descriptor.FieldDescriptor( + name='default_fixed32_extension', full_name='protobuf_unittest.default_fixed32_extension', 
index=57, + number=67, type=7, cpp_type=3, label=1, + has_default_value=True, default_value=47, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_FIXED64_EXTENSION_FIELD_NUMBER = 68 +default_fixed64_extension = _descriptor.FieldDescriptor( + name='default_fixed64_extension', full_name='protobuf_unittest.default_fixed64_extension', index=58, + number=68, type=6, cpp_type=4, label=1, + has_default_value=True, default_value=48, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_SFIXED32_EXTENSION_FIELD_NUMBER = 69 +default_sfixed32_extension = _descriptor.FieldDescriptor( + name='default_sfixed32_extension', full_name='protobuf_unittest.default_sfixed32_extension', index=59, + number=69, type=15, cpp_type=1, label=1, + has_default_value=True, default_value=49, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_SFIXED64_EXTENSION_FIELD_NUMBER = 70 +default_sfixed64_extension = _descriptor.FieldDescriptor( + name='default_sfixed64_extension', full_name='protobuf_unittest.default_sfixed64_extension', index=60, + number=70, type=16, cpp_type=2, label=1, + has_default_value=True, default_value=-50, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_FLOAT_EXTENSION_FIELD_NUMBER = 71 +default_float_extension = _descriptor.FieldDescriptor( + name='default_float_extension', full_name='protobuf_unittest.default_float_extension', index=61, + number=71, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=51.5, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_DOUBLE_EXTENSION_FIELD_NUMBER = 72 +default_double_extension = _descriptor.FieldDescriptor( + name='default_double_extension', 
full_name='protobuf_unittest.default_double_extension', index=62, + number=72, type=1, cpp_type=5, label=1, + has_default_value=True, default_value=52000, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_BOOL_EXTENSION_FIELD_NUMBER = 73 +default_bool_extension = _descriptor.FieldDescriptor( + name='default_bool_extension', full_name='protobuf_unittest.default_bool_extension', index=63, + number=73, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_STRING_EXTENSION_FIELD_NUMBER = 74 +default_string_extension = _descriptor.FieldDescriptor( + name='default_string_extension', full_name='protobuf_unittest.default_string_extension', index=64, + number=74, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("hello").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_BYTES_EXTENSION_FIELD_NUMBER = 75 +default_bytes_extension = _descriptor.FieldDescriptor( + name='default_bytes_extension', full_name='protobuf_unittest.default_bytes_extension', index=65, + number=75, type=12, cpp_type=9, label=1, + has_default_value=True, default_value=_b("world"), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_NESTED_ENUM_EXTENSION_FIELD_NUMBER = 81 +default_nested_enum_extension = _descriptor.FieldDescriptor( + name='default_nested_enum_extension', full_name='protobuf_unittest.default_nested_enum_extension', index=66, + number=81, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_FOREIGN_ENUM_EXTENSION_FIELD_NUMBER = 82 
+default_foreign_enum_extension = _descriptor.FieldDescriptor( + name='default_foreign_enum_extension', full_name='protobuf_unittest.default_foreign_enum_extension', index=67, + number=82, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=5, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_IMPORT_ENUM_EXTENSION_FIELD_NUMBER = 83 +default_import_enum_extension = _descriptor.FieldDescriptor( + name='default_import_enum_extension', full_name='protobuf_unittest.default_import_enum_extension', index=68, + number=83, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=8, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +DEFAULT_STRING_PIECE_EXTENSION_FIELD_NUMBER = 84 +default_string_piece_extension = _descriptor.FieldDescriptor( + name='default_string_piece_extension', full_name='protobuf_unittest.default_string_piece_extension', index=69, + number=84, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("abc").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))) +DEFAULT_CORD_EXTENSION_FIELD_NUMBER = 85 +default_cord_extension = _descriptor.FieldDescriptor( + name='default_cord_extension', full_name='protobuf_unittest.default_cord_extension', index=70, + number=85, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("123").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))) +ONEOF_UINT32_EXTENSION_FIELD_NUMBER = 111 +oneof_uint32_extension = _descriptor.FieldDescriptor( + name='oneof_uint32_extension', 
full_name='protobuf_unittest.oneof_uint32_extension', index=71, + number=111, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +ONEOF_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 112 +oneof_nested_message_extension = _descriptor.FieldDescriptor( + name='oneof_nested_message_extension', full_name='protobuf_unittest.oneof_nested_message_extension', index=72, + number=112, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +ONEOF_STRING_EXTENSION_FIELD_NUMBER = 113 +oneof_string_extension = _descriptor.FieldDescriptor( + name='oneof_string_extension', full_name='protobuf_unittest.oneof_string_extension', index=73, + number=113, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +ONEOF_BYTES_EXTENSION_FIELD_NUMBER = 114 +oneof_bytes_extension = _descriptor.FieldDescriptor( + name='oneof_bytes_extension', full_name='protobuf_unittest.oneof_bytes_extension', index=74, + number=114, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +MY_EXTENSION_STRING_FIELD_NUMBER = 50 +my_extension_string = _descriptor.FieldDescriptor( + name='my_extension_string', full_name='protobuf_unittest.my_extension_string', index=75, + number=50, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +MY_EXTENSION_INT_FIELD_NUMBER = 5 +my_extension_int = 
_descriptor.FieldDescriptor( + name='my_extension_int', full_name='protobuf_unittest.my_extension_int', index=76, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None) +PACKED_INT32_EXTENSION_FIELD_NUMBER = 90 +packed_int32_extension = _descriptor.FieldDescriptor( + name='packed_int32_extension', full_name='protobuf_unittest.packed_int32_extension', index=77, + number=90, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_INT64_EXTENSION_FIELD_NUMBER = 91 +packed_int64_extension = _descriptor.FieldDescriptor( + name='packed_int64_extension', full_name='protobuf_unittest.packed_int64_extension', index=78, + number=91, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_UINT32_EXTENSION_FIELD_NUMBER = 92 +packed_uint32_extension = _descriptor.FieldDescriptor( + name='packed_uint32_extension', full_name='protobuf_unittest.packed_uint32_extension', index=79, + number=92, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_UINT64_EXTENSION_FIELD_NUMBER = 93 +packed_uint64_extension = _descriptor.FieldDescriptor( + name='packed_uint64_extension', full_name='protobuf_unittest.packed_uint64_extension', index=80, + number=93, type=4, cpp_type=4, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_SINT32_EXTENSION_FIELD_NUMBER = 94 +packed_sint32_extension = _descriptor.FieldDescriptor( + name='packed_sint32_extension', full_name='protobuf_unittest.packed_sint32_extension', index=81, + number=94, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_SINT64_EXTENSION_FIELD_NUMBER = 95 +packed_sint64_extension = _descriptor.FieldDescriptor( + name='packed_sint64_extension', full_name='protobuf_unittest.packed_sint64_extension', index=82, + number=95, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_FIXED32_EXTENSION_FIELD_NUMBER = 96 +packed_fixed32_extension = _descriptor.FieldDescriptor( + name='packed_fixed32_extension', full_name='protobuf_unittest.packed_fixed32_extension', index=83, + number=96, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_FIXED64_EXTENSION_FIELD_NUMBER = 97 +packed_fixed64_extension = _descriptor.FieldDescriptor( + name='packed_fixed64_extension', full_name='protobuf_unittest.packed_fixed64_extension', index=84, + number=97, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, 
extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_SFIXED32_EXTENSION_FIELD_NUMBER = 98 +packed_sfixed32_extension = _descriptor.FieldDescriptor( + name='packed_sfixed32_extension', full_name='protobuf_unittest.packed_sfixed32_extension', index=85, + number=98, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_SFIXED64_EXTENSION_FIELD_NUMBER = 99 +packed_sfixed64_extension = _descriptor.FieldDescriptor( + name='packed_sfixed64_extension', full_name='protobuf_unittest.packed_sfixed64_extension', index=86, + number=99, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_FLOAT_EXTENSION_FIELD_NUMBER = 100 +packed_float_extension = _descriptor.FieldDescriptor( + name='packed_float_extension', full_name='protobuf_unittest.packed_float_extension', index=87, + number=100, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_DOUBLE_EXTENSION_FIELD_NUMBER = 101 +packed_double_extension = _descriptor.FieldDescriptor( + name='packed_double_extension', full_name='protobuf_unittest.packed_double_extension', index=88, + number=101, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), 
_b('\020\001'))) +PACKED_BOOL_EXTENSION_FIELD_NUMBER = 102 +packed_bool_extension = _descriptor.FieldDescriptor( + name='packed_bool_extension', full_name='protobuf_unittest.packed_bool_extension', index=89, + number=102, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +PACKED_ENUM_EXTENSION_FIELD_NUMBER = 103 +packed_enum_extension = _descriptor.FieldDescriptor( + name='packed_enum_extension', full_name='protobuf_unittest.packed_enum_extension', index=90, + number=103, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))) +UNPACKED_INT32_EXTENSION_FIELD_NUMBER = 90 +unpacked_int32_extension = _descriptor.FieldDescriptor( + name='unpacked_int32_extension', full_name='protobuf_unittest.unpacked_int32_extension', index=91, + number=90, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_INT64_EXTENSION_FIELD_NUMBER = 91 +unpacked_int64_extension = _descriptor.FieldDescriptor( + name='unpacked_int64_extension', full_name='protobuf_unittest.unpacked_int64_extension', index=92, + number=91, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_UINT32_EXTENSION_FIELD_NUMBER = 92 +unpacked_uint32_extension = 
_descriptor.FieldDescriptor( + name='unpacked_uint32_extension', full_name='protobuf_unittest.unpacked_uint32_extension', index=93, + number=92, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_UINT64_EXTENSION_FIELD_NUMBER = 93 +unpacked_uint64_extension = _descriptor.FieldDescriptor( + name='unpacked_uint64_extension', full_name='protobuf_unittest.unpacked_uint64_extension', index=94, + number=93, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_SINT32_EXTENSION_FIELD_NUMBER = 94 +unpacked_sint32_extension = _descriptor.FieldDescriptor( + name='unpacked_sint32_extension', full_name='protobuf_unittest.unpacked_sint32_extension', index=95, + number=94, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_SINT64_EXTENSION_FIELD_NUMBER = 95 +unpacked_sint64_extension = _descriptor.FieldDescriptor( + name='unpacked_sint64_extension', full_name='protobuf_unittest.unpacked_sint64_extension', index=96, + number=95, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_FIXED32_EXTENSION_FIELD_NUMBER = 96 +unpacked_fixed32_extension = _descriptor.FieldDescriptor( + name='unpacked_fixed32_extension', 
full_name='protobuf_unittest.unpacked_fixed32_extension', index=97, + number=96, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_FIXED64_EXTENSION_FIELD_NUMBER = 97 +unpacked_fixed64_extension = _descriptor.FieldDescriptor( + name='unpacked_fixed64_extension', full_name='protobuf_unittest.unpacked_fixed64_extension', index=98, + number=97, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_SFIXED32_EXTENSION_FIELD_NUMBER = 98 +unpacked_sfixed32_extension = _descriptor.FieldDescriptor( + name='unpacked_sfixed32_extension', full_name='protobuf_unittest.unpacked_sfixed32_extension', index=99, + number=98, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_SFIXED64_EXTENSION_FIELD_NUMBER = 99 +unpacked_sfixed64_extension = _descriptor.FieldDescriptor( + name='unpacked_sfixed64_extension', full_name='protobuf_unittest.unpacked_sfixed64_extension', index=100, + number=99, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_FLOAT_EXTENSION_FIELD_NUMBER = 100 +unpacked_float_extension = _descriptor.FieldDescriptor( + name='unpacked_float_extension', full_name='protobuf_unittest.unpacked_float_extension', 
index=101, + number=100, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_DOUBLE_EXTENSION_FIELD_NUMBER = 101 +unpacked_double_extension = _descriptor.FieldDescriptor( + name='unpacked_double_extension', full_name='protobuf_unittest.unpacked_double_extension', index=102, + number=101, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_BOOL_EXTENSION_FIELD_NUMBER = 102 +unpacked_bool_extension = _descriptor.FieldDescriptor( + name='unpacked_bool_extension', full_name='protobuf_unittest.unpacked_bool_extension', index=103, + number=102, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) +UNPACKED_ENUM_EXTENSION_FIELD_NUMBER = 103 +unpacked_enum_extension = _descriptor.FieldDescriptor( + name='unpacked_enum_extension', full_name='protobuf_unittest.unpacked_enum_extension', index=104, + number=103, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))) + +_TESTALLTYPES_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='protobuf_unittest.TestAllTypes.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOO', index=0, number=1, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='BAR', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAZ', index=2, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEG', index=3, number=-1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3202, + serialized_end=3259, +) +_sym_db.RegisterEnumDescriptor(_TESTALLTYPES_NESTEDENUM) + +_TESTONEOF2_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='protobuf_unittest.TestOneof2.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOO', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAR', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAZ', index=2, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3202, + serialized_end=3241, +) +_sym_db.RegisterEnumDescriptor(_TESTONEOF2_NESTEDENUM) + +_TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE = _descriptor.EnumDescriptor( + name='DynamicEnumType', + full_name='protobuf_unittest.TestDynamicExtensions.DynamicEnumType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='DYNAMIC_FOO', index=0, number=2200, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DYNAMIC_BAR', index=1, number=2201, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DYNAMIC_BAZ', index=2, number=2202, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=10724, + serialized_end=10795, +) +_sym_db.RegisterEnumDescriptor(_TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE) + + +_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='protobuf_unittest.TestAllTypes.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ 
+ _descriptor.FieldDescriptor( + name='bb', full_name='protobuf_unittest.TestAllTypes.NestedMessage.bb', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3117, + serialized_end=3144, +) + +_TESTALLTYPES_OPTIONALGROUP = _descriptor.Descriptor( + name='OptionalGroup', + full_name='protobuf_unittest.TestAllTypes.OptionalGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestAllTypes.OptionalGroup.a', index=0, + number=17, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3146, + serialized_end=3172, +) + +_TESTALLTYPES_REPEATEDGROUP = _descriptor.Descriptor( + name='RepeatedGroup', + full_name='protobuf_unittest.TestAllTypes.RepeatedGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestAllTypes.RepeatedGroup.a', index=0, + number=47, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3174, + 
serialized_end=3200, +) + +_TESTALLTYPES = _descriptor.Descriptor( + name='TestAllTypes', + full_name='protobuf_unittest.TestAllTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_int32', full_name='protobuf_unittest.TestAllTypes.optional_int32', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_int64', full_name='protobuf_unittest.TestAllTypes.optional_int64', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_uint32', full_name='protobuf_unittest.TestAllTypes.optional_uint32', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_uint64', full_name='protobuf_unittest.TestAllTypes.optional_uint64', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sint32', full_name='protobuf_unittest.TestAllTypes.optional_sint32', index=4, + number=5, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sint64', full_name='protobuf_unittest.TestAllTypes.optional_sint64', index=5, + number=6, 
type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_fixed32', full_name='protobuf_unittest.TestAllTypes.optional_fixed32', index=6, + number=7, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_fixed64', full_name='protobuf_unittest.TestAllTypes.optional_fixed64', index=7, + number=8, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sfixed32', full_name='protobuf_unittest.TestAllTypes.optional_sfixed32', index=8, + number=9, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sfixed64', full_name='protobuf_unittest.TestAllTypes.optional_sfixed64', index=9, + number=10, type=16, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_float', full_name='protobuf_unittest.TestAllTypes.optional_float', index=10, + number=11, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_double', full_name='protobuf_unittest.TestAllTypes.optional_double', index=11, + 
number=12, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_bool', full_name='protobuf_unittest.TestAllTypes.optional_bool', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_string', full_name='protobuf_unittest.TestAllTypes.optional_string', index=13, + number=14, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_bytes', full_name='protobuf_unittest.TestAllTypes.optional_bytes', index=14, + number=15, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optionalgroup', full_name='protobuf_unittest.TestAllTypes.optionalgroup', index=15, + number=16, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_nested_message', full_name='protobuf_unittest.TestAllTypes.optional_nested_message', index=16, + number=18, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_foreign_message', 
full_name='protobuf_unittest.TestAllTypes.optional_foreign_message', index=17, + number=19, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_import_message', full_name='protobuf_unittest.TestAllTypes.optional_import_message', index=18, + number=20, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_nested_enum', full_name='protobuf_unittest.TestAllTypes.optional_nested_enum', index=19, + number=21, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_foreign_enum', full_name='protobuf_unittest.TestAllTypes.optional_foreign_enum', index=20, + number=22, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=4, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_import_enum', full_name='protobuf_unittest.TestAllTypes.optional_import_enum', index=21, + number=23, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=7, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_string_piece', full_name='protobuf_unittest.TestAllTypes.optional_string_piece', index=22, + number=24, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='optional_cord', full_name='protobuf_unittest.TestAllTypes.optional_cord', index=23, + number=25, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='optional_public_import_message', full_name='protobuf_unittest.TestAllTypes.optional_public_import_message', index=24, + number=26, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_lazy_message', full_name='protobuf_unittest.TestAllTypes.optional_lazy_message', index=25, + number=27, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + _descriptor.FieldDescriptor( + name='repeated_int32', full_name='protobuf_unittest.TestAllTypes.repeated_int32', index=26, + number=31, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int64', full_name='protobuf_unittest.TestAllTypes.repeated_int64', index=27, + number=32, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='repeated_uint32', full_name='protobuf_unittest.TestAllTypes.repeated_uint32', index=28, + number=33, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint64', full_name='protobuf_unittest.TestAllTypes.repeated_uint64', index=29, + number=34, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sint32', full_name='protobuf_unittest.TestAllTypes.repeated_sint32', index=30, + number=35, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sint64', full_name='protobuf_unittest.TestAllTypes.repeated_sint64', index=31, + number=36, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_fixed32', full_name='protobuf_unittest.TestAllTypes.repeated_fixed32', index=32, + number=37, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_fixed64', full_name='protobuf_unittest.TestAllTypes.repeated_fixed64', index=33, + number=38, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=None), + _descriptor.FieldDescriptor( + name='repeated_sfixed32', full_name='protobuf_unittest.TestAllTypes.repeated_sfixed32', index=34, + number=39, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sfixed64', full_name='protobuf_unittest.TestAllTypes.repeated_sfixed64', index=35, + number=40, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_float', full_name='protobuf_unittest.TestAllTypes.repeated_float', index=36, + number=41, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_double', full_name='protobuf_unittest.TestAllTypes.repeated_double', index=37, + number=42, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bool', full_name='protobuf_unittest.TestAllTypes.repeated_bool', index=38, + number=43, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_string', full_name='protobuf_unittest.TestAllTypes.repeated_string', index=39, + number=44, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bytes', full_name='protobuf_unittest.TestAllTypes.repeated_bytes', index=40, + number=45, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeatedgroup', full_name='protobuf_unittest.TestAllTypes.repeatedgroup', index=41, + number=46, type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_message', full_name='protobuf_unittest.TestAllTypes.repeated_nested_message', index=42, + number=48, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_foreign_message', full_name='protobuf_unittest.TestAllTypes.repeated_foreign_message', index=43, + number=49, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_import_message', full_name='protobuf_unittest.TestAllTypes.repeated_import_message', index=44, + number=50, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_enum', full_name='protobuf_unittest.TestAllTypes.repeated_nested_enum', index=45, + number=51, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_foreign_enum', full_name='protobuf_unittest.TestAllTypes.repeated_foreign_enum', index=46, + number=52, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_import_enum', full_name='protobuf_unittest.TestAllTypes.repeated_import_enum', index=47, + number=53, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_string_piece', full_name='protobuf_unittest.TestAllTypes.repeated_string_piece', index=48, + number=54, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='repeated_cord', full_name='protobuf_unittest.TestAllTypes.repeated_cord', index=49, + number=55, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='repeated_lazy_message', full_name='protobuf_unittest.TestAllTypes.repeated_lazy_message', index=50, + number=57, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + _descriptor.FieldDescriptor( + name='default_int32', full_name='protobuf_unittest.TestAllTypes.default_int32', index=51, + number=61, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=41, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_int64', full_name='protobuf_unittest.TestAllTypes.default_int64', index=52, + number=62, type=3, cpp_type=2, label=1, + has_default_value=True, default_value=42, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_uint32', full_name='protobuf_unittest.TestAllTypes.default_uint32', index=53, + number=63, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=43, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_uint64', full_name='protobuf_unittest.TestAllTypes.default_uint64', index=54, + number=64, type=4, cpp_type=4, label=1, + has_default_value=True, default_value=44, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sint32', full_name='protobuf_unittest.TestAllTypes.default_sint32', index=55, + number=65, type=17, cpp_type=1, label=1, + has_default_value=True, default_value=-45, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sint64', full_name='protobuf_unittest.TestAllTypes.default_sint64', index=56, + number=66, type=18, cpp_type=2, label=1, + has_default_value=True, default_value=46, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_fixed32', full_name='protobuf_unittest.TestAllTypes.default_fixed32', index=57, + number=67, type=7, cpp_type=3, label=1, + has_default_value=True, default_value=47, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_fixed64', full_name='protobuf_unittest.TestAllTypes.default_fixed64', index=58, + number=68, type=6, cpp_type=4, label=1, + has_default_value=True, default_value=48, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sfixed32', full_name='protobuf_unittest.TestAllTypes.default_sfixed32', index=59, + number=69, type=15, cpp_type=1, label=1, + has_default_value=True, default_value=49, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_sfixed64', full_name='protobuf_unittest.TestAllTypes.default_sfixed64', index=60, + number=70, type=16, cpp_type=2, label=1, + has_default_value=True, default_value=-50, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_float', full_name='protobuf_unittest.TestAllTypes.default_float', index=61, + number=71, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=51.5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_double', full_name='protobuf_unittest.TestAllTypes.default_double', index=62, + number=72, type=1, cpp_type=5, label=1, + has_default_value=True, default_value=52000, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_bool', full_name='protobuf_unittest.TestAllTypes.default_bool', index=63, + number=73, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_string', full_name='protobuf_unittest.TestAllTypes.default_string', index=64, + number=74, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("hello").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_bytes', full_name='protobuf_unittest.TestAllTypes.default_bytes', index=65, + number=75, type=12, cpp_type=9, label=1, + has_default_value=True, default_value=_b("world"), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_nested_enum', full_name='protobuf_unittest.TestAllTypes.default_nested_enum', index=66, + number=81, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_foreign_enum', full_name='protobuf_unittest.TestAllTypes.default_foreign_enum', index=67, + number=82, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_import_enum', full_name='protobuf_unittest.TestAllTypes.default_import_enum', index=68, + number=83, type=14, cpp_type=8, label=1, + 
has_default_value=True, default_value=8, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='default_string_piece', full_name='protobuf_unittest.TestAllTypes.default_string_piece', index=69, + number=84, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("abc").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='default_cord', full_name='protobuf_unittest.TestAllTypes.default_cord', index=70, + number=85, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("123").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='oneof_uint32', full_name='protobuf_unittest.TestAllTypes.oneof_uint32', index=71, + number=111, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_nested_message', full_name='protobuf_unittest.TestAllTypes.oneof_nested_message', index=72, + number=112, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_string', full_name='protobuf_unittest.TestAllTypes.oneof_string', index=73, + number=113, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_bytes', full_name='protobuf_unittest.TestAllTypes.oneof_bytes', index=74, + number=114, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTALLTYPES_NESTEDMESSAGE, _TESTALLTYPES_OPTIONALGROUP, _TESTALLTYPES_REPEATEDGROUP, ], + enum_types=[ + _TESTALLTYPES_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='oneof_field', full_name='protobuf_unittest.TestAllTypes.oneof_field', + index=0, containing_type=None, fields=[]), + ], + serialized_start=93, + serialized_end=3274, +) + + +_NESTEDTESTALLTYPES = _descriptor.Descriptor( + name='NestedTestAllTypes', + full_name='protobuf_unittest.NestedTestAllTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='child', full_name='protobuf_unittest.NestedTestAllTypes.child', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='payload', full_name='protobuf_unittest.NestedTestAllTypes.payload', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_child', full_name='protobuf_unittest.NestedTestAllTypes.repeated_child', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3277, + serialized_end=3464, +) + + +_TESTDEPRECATEDFIELDS = _descriptor.Descriptor( + name='TestDeprecatedFields', + full_name='protobuf_unittest.TestDeprecatedFields', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='deprecated_int32', full_name='protobuf_unittest.TestDeprecatedFields.deprecated_int32', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3466, + serialized_end=3518, +) + + +_FOREIGNMESSAGE = _descriptor.Descriptor( + name='ForeignMessage', + full_name='protobuf_unittest.ForeignMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='c', full_name='protobuf_unittest.ForeignMessage.c', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3520, + serialized_end=3547, +) + + +_TESTRESERVEDFIELDS = _descriptor.Descriptor( + name='TestReservedFields', + full_name='protobuf_unittest.TestReservedFields', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3549, + serialized_end=3597, +) + + +_TESTALLEXTENSIONS = _descriptor.Descriptor( + name='TestAllExtensions', + full_name='protobuf_unittest.TestAllExtensions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1, 536870912), ], + oneofs=[ + ], + serialized_start=3599, + serialized_end=3628, +) + + +_OPTIONALGROUP_EXTENSION = _descriptor.Descriptor( + name='OptionalGroup_extension', + full_name='protobuf_unittest.OptionalGroup_extension', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.OptionalGroup_extension.a', index=0, + number=17, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3630, + serialized_end=3666, +) + + +_REPEATEDGROUP_EXTENSION = _descriptor.Descriptor( + name='RepeatedGroup_extension', + full_name='protobuf_unittest.RepeatedGroup_extension', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.RepeatedGroup_extension.a', index=0, + number=47, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3668, + serialized_end=3704, +) + + +_TESTNESTEDEXTENSION = _descriptor.Descriptor( + name='TestNestedExtension', + full_name='protobuf_unittest.TestNestedExtension', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + _descriptor.FieldDescriptor( + name='test', full_name='protobuf_unittest.TestNestedExtension.test', index=0, + number=1002, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("test").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nested_string_extension', full_name='protobuf_unittest.TestNestedExtension.nested_string_extension', index=1, + number=1003, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3707, + serialized_end=3859, +) + + +_TESTREQUIRED = _descriptor.Descriptor( + name='TestRequired', + full_name='protobuf_unittest.TestRequired', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestRequired.a', index=0, + number=1, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy2', full_name='protobuf_unittest.TestRequired.dummy2', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='b', full_name='protobuf_unittest.TestRequired.b', index=2, + number=3, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy4', full_name='protobuf_unittest.TestRequired.dummy4', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy5', full_name='protobuf_unittest.TestRequired.dummy5', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy6', full_name='protobuf_unittest.TestRequired.dummy6', index=5, + number=6, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy7', full_name='protobuf_unittest.TestRequired.dummy7', index=6, + number=7, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy8', full_name='protobuf_unittest.TestRequired.dummy8', index=7, + number=8, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='dummy9', full_name='protobuf_unittest.TestRequired.dummy9', index=8, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy10', full_name='protobuf_unittest.TestRequired.dummy10', index=9, + number=10, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy11', full_name='protobuf_unittest.TestRequired.dummy11', index=10, + number=11, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy12', full_name='protobuf_unittest.TestRequired.dummy12', index=11, + number=12, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy13', full_name='protobuf_unittest.TestRequired.dummy13', index=12, + number=13, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy14', full_name='protobuf_unittest.TestRequired.dummy14', index=13, + number=14, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy15', full_name='protobuf_unittest.TestRequired.dummy15', 
index=14, + number=15, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy16', full_name='protobuf_unittest.TestRequired.dummy16', index=15, + number=16, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy17', full_name='protobuf_unittest.TestRequired.dummy17', index=16, + number=17, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy18', full_name='protobuf_unittest.TestRequired.dummy18', index=17, + number=18, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy19', full_name='protobuf_unittest.TestRequired.dummy19', index=18, + number=19, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy20', full_name='protobuf_unittest.TestRequired.dummy20', index=19, + number=20, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy21', full_name='protobuf_unittest.TestRequired.dummy21', index=20, + number=21, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy22', full_name='protobuf_unittest.TestRequired.dummy22', index=21, + number=22, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy23', full_name='protobuf_unittest.TestRequired.dummy23', index=22, + number=23, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy24', full_name='protobuf_unittest.TestRequired.dummy24', index=23, + number=24, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy25', full_name='protobuf_unittest.TestRequired.dummy25', index=24, + number=25, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy26', full_name='protobuf_unittest.TestRequired.dummy26', index=25, + number=26, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy27', full_name='protobuf_unittest.TestRequired.dummy27', index=26, + number=27, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy28', full_name='protobuf_unittest.TestRequired.dummy28', index=27, + number=28, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy29', full_name='protobuf_unittest.TestRequired.dummy29', index=28, + number=29, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy30', full_name='protobuf_unittest.TestRequired.dummy30', index=29, + number=30, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy31', full_name='protobuf_unittest.TestRequired.dummy31', index=30, + number=31, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy32', full_name='protobuf_unittest.TestRequired.dummy32', index=31, + number=32, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='c', full_name='protobuf_unittest.TestRequired.c', index=32, + number=33, type=5, cpp_type=1, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='single', 
full_name='protobuf_unittest.TestRequired.single', index=0, + number=1000, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='multi', full_name='protobuf_unittest.TestRequired.multi', index=1, + number=1001, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3862, + serialized_end=4587, +) + + +_TESTREQUIREDFOREIGN = _descriptor.Descriptor( + name='TestRequiredForeign', + full_name='protobuf_unittest.TestRequiredForeign', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_message', full_name='protobuf_unittest.TestRequiredForeign.optional_message', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_message', full_name='protobuf_unittest.TestRequiredForeign.repeated_message', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dummy', full_name='protobuf_unittest.TestRequiredForeign.dummy', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + 
extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4590, + serialized_end=4744, +) + + +_TESTFOREIGNNESTED = _descriptor.Descriptor( + name='TestForeignNested', + full_name='protobuf_unittest.TestForeignNested', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='foreign_nested', full_name='protobuf_unittest.TestForeignNested.foreign_nested', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4746, + serialized_end=4836, +) + + +_TESTEMPTYMESSAGE = _descriptor.Descriptor( + name='TestEmptyMessage', + full_name='protobuf_unittest.TestEmptyMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4838, + serialized_end=4856, +) + + +_TESTEMPTYMESSAGEWITHEXTENSIONS = _descriptor.Descriptor( + name='TestEmptyMessageWithExtensions', + full_name='protobuf_unittest.TestEmptyMessageWithExtensions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1, 536870912), ], + oneofs=[ + ], + serialized_start=4858, + serialized_end=4900, +) + + +_TESTMULTIPLEEXTENSIONRANGES = _descriptor.Descriptor( + name='TestMultipleExtensionRanges', + full_name='protobuf_unittest.TestMultipleExtensionRanges', + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(42, 43), (4143, 4244), (65536, 536870912), ], + oneofs=[ + ], + serialized_start=4902, + serialized_end=4957, +) + + +_TESTREALLYLARGETAGNUMBER = _descriptor.Descriptor( + name='TestReallyLargeTagNumber', + full_name='protobuf_unittest.TestReallyLargeTagNumber', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestReallyLargeTagNumber.a', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bb', full_name='protobuf_unittest.TestReallyLargeTagNumber.bb', index=1, + number=268435455, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4959, + serialized_end=5011, +) + + +_TESTRECURSIVEMESSAGE = _descriptor.Descriptor( + name='TestRecursiveMessage', + full_name='protobuf_unittest.TestRecursiveMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestRecursiveMessage.a', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='i', 
full_name='protobuf_unittest.TestRecursiveMessage.i', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5013, + serialized_end=5098, +) + + +_TESTMUTUALRECURSIONA = _descriptor.Descriptor( + name='TestMutualRecursionA', + full_name='protobuf_unittest.TestMutualRecursionA', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bb', full_name='protobuf_unittest.TestMutualRecursionA.bb', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5100, + serialized_end=5175, +) + + +_TESTMUTUALRECURSIONB = _descriptor.Descriptor( + name='TestMutualRecursionB', + full_name='protobuf_unittest.TestMutualRecursionB', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestMutualRecursionB.a', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_int32', full_name='protobuf_unittest.TestMutualRecursionB.optional_int32', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5177, + serialized_end=5275, +) + + +_TESTDUPFIELDNUMBER_FOO = _descriptor.Descriptor( + name='Foo', + full_name='protobuf_unittest.TestDupFieldNumber.Foo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestDupFieldNumber.Foo.a', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5423, + serialized_end=5439, +) + +_TESTDUPFIELDNUMBER_BAR = _descriptor.Descriptor( + name='Bar', + full_name='protobuf_unittest.TestDupFieldNumber.Bar', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestDupFieldNumber.Bar.a', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5441, + serialized_end=5457, +) + +_TESTDUPFIELDNUMBER = _descriptor.Descriptor( + name='TestDupFieldNumber', + full_name='protobuf_unittest.TestDupFieldNumber', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', 
full_name='protobuf_unittest.TestDupFieldNumber.a', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo', full_name='protobuf_unittest.TestDupFieldNumber.foo', index=1, + number=2, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bar', full_name='protobuf_unittest.TestDupFieldNumber.bar', index=2, + number=3, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTDUPFIELDNUMBER_FOO, _TESTDUPFIELDNUMBER_BAR, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5278, + serialized_end=5457, +) + + +_TESTEAGERMESSAGE = _descriptor.Descriptor( + name='TestEagerMessage', + full_name='protobuf_unittest.TestEagerMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sub_message', full_name='protobuf_unittest.TestEagerMessage.sub_message', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\000'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5459, + serialized_end=5535, +) + + +_TESTLAZYMESSAGE = 
_descriptor.Descriptor( + name='TestLazyMessage', + full_name='protobuf_unittest.TestLazyMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sub_message', full_name='protobuf_unittest.TestLazyMessage.sub_message', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5537, + serialized_end=5612, +) + + +_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='nestedmessage_repeated_int32', full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage.nestedmessage_repeated_int32', index=0, + number=1, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nestedmessage_repeated_foreignmessage', full_name='protobuf_unittest.TestNestedMessageHasBits.NestedMessage.nestedmessage_repeated_foreignmessage', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5736, + 
serialized_end=5871, +) + +_TESTNESTEDMESSAGEHASBITS = _descriptor.Descriptor( + name='TestNestedMessageHasBits', + full_name='protobuf_unittest.TestNestedMessageHasBits', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_nested_message', full_name='protobuf_unittest.TestNestedMessageHasBits.optional_nested_message', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5615, + serialized_end=5871, +) + + +_TESTCAMELCASEFIELDNAMES = _descriptor.Descriptor( + name='TestCamelCaseFieldNames', + full_name='protobuf_unittest.TestCamelCaseFieldNames', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='PrimitiveField', full_name='protobuf_unittest.TestCamelCaseFieldNames.PrimitiveField', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='StringField', full_name='protobuf_unittest.TestCamelCaseFieldNames.StringField', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='EnumField', full_name='protobuf_unittest.TestCamelCaseFieldNames.EnumField', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=4, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='MessageField', full_name='protobuf_unittest.TestCamelCaseFieldNames.MessageField', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='StringPieceField', full_name='protobuf_unittest.TestCamelCaseFieldNames.StringPieceField', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='CordField', full_name='protobuf_unittest.TestCamelCaseFieldNames.CordField', index=5, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='RepeatedPrimitiveField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedPrimitiveField', index=6, + number=7, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='RepeatedStringField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedStringField', index=7, + number=8, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='RepeatedEnumField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedEnumField', index=8, + number=9, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='RepeatedMessageField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedMessageField', index=9, + number=10, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='RepeatedStringPieceField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedStringPieceField', index=10, + number=11, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='RepeatedCordField', full_name='protobuf_unittest.TestCamelCaseFieldNames.RepeatedCordField', index=11, + number=12, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5874, + serialized_end=6359, +) + + +_TESTFIELDORDERINGS_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='protobuf_unittest.TestFieldOrderings.NestedMessage', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='oo', full_name='protobuf_unittest.TestFieldOrderings.NestedMessage.oo', index=0, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bb', full_name='protobuf_unittest.TestFieldOrderings.NestedMessage.bb', index=1, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6524, + serialized_end=6563, +) + +_TESTFIELDORDERINGS = _descriptor.Descriptor( + name='TestFieldOrderings', + full_name='protobuf_unittest.TestFieldOrderings', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='my_string', full_name='protobuf_unittest.TestFieldOrderings.my_string', index=0, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='my_int', full_name='protobuf_unittest.TestFieldOrderings.my_int', index=1, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='my_float', full_name='protobuf_unittest.TestFieldOrderings.my_float', index=2, + number=101, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_nested_message', full_name='protobuf_unittest.TestFieldOrderings.optional_nested_message', index=3, + number=200, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTFIELDORDERINGS_NESTEDMESSAGE, ], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(2, 11), (12, 101), ], + oneofs=[ + ], + serialized_start=6362, + serialized_end=6575, +) + + +_TESTEXTREMEDEFAULTVALUES = _descriptor.Descriptor( + name='TestExtremeDefaultValues', + full_name='protobuf_unittest.TestExtremeDefaultValues', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='escaped_bytes', full_name='protobuf_unittest.TestExtremeDefaultValues.escaped_bytes', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=True, default_value=_b("\000\001\007\010\014\n\r\t\013\\\'\"\376"), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='large_uint32', full_name='protobuf_unittest.TestExtremeDefaultValues.large_uint32', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=True, default_value=4294967295, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='large_uint64', full_name='protobuf_unittest.TestExtremeDefaultValues.large_uint64', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=True, default_value=18446744073709551615, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='small_int32', full_name='protobuf_unittest.TestExtremeDefaultValues.small_int32', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-2147483647, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='small_int64', full_name='protobuf_unittest.TestExtremeDefaultValues.small_int64', index=4, + number=5, type=3, cpp_type=2, label=1, + has_default_value=True, default_value=-9223372036854775807, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='really_small_int32', full_name='protobuf_unittest.TestExtremeDefaultValues.really_small_int32', index=5, + number=21, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=-2147483648, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='really_small_int64', full_name='protobuf_unittest.TestExtremeDefaultValues.really_small_int64', index=6, + number=22, type=3, cpp_type=2, label=1, + has_default_value=True, default_value=-9223372036854775808, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='utf8_string', full_name='protobuf_unittest.TestExtremeDefaultValues.utf8_string', index=7, + number=6, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("\341\210\264").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='zero_float', full_name='protobuf_unittest.TestExtremeDefaultValues.zero_float', index=8, + number=7, type=2, cpp_type=6, label=1, 
+ has_default_value=True, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='one_float', full_name='protobuf_unittest.TestExtremeDefaultValues.one_float', index=9, + number=8, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='small_float', full_name='protobuf_unittest.TestExtremeDefaultValues.small_float', index=10, + number=9, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=1.5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='negative_one_float', full_name='protobuf_unittest.TestExtremeDefaultValues.negative_one_float', index=11, + number=10, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=-1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='negative_float', full_name='protobuf_unittest.TestExtremeDefaultValues.negative_float', index=12, + number=11, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=-1.5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='large_float', full_name='protobuf_unittest.TestExtremeDefaultValues.large_float', index=13, + number=12, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=2e+08, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='small_negative_float', 
full_name='protobuf_unittest.TestExtremeDefaultValues.small_negative_float', index=14, + number=13, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=-8e-28, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='inf_double', full_name='protobuf_unittest.TestExtremeDefaultValues.inf_double', index=15, + number=14, type=1, cpp_type=5, label=1, + has_default_value=True, default_value=1e10000, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='neg_inf_double', full_name='protobuf_unittest.TestExtremeDefaultValues.neg_inf_double', index=16, + number=15, type=1, cpp_type=5, label=1, + has_default_value=True, default_value=-1e10000, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='nan_double', full_name='protobuf_unittest.TestExtremeDefaultValues.nan_double', index=17, + number=16, type=1, cpp_type=5, label=1, + has_default_value=True, default_value=(1e10000 * 0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='inf_float', full_name='protobuf_unittest.TestExtremeDefaultValues.inf_float', index=18, + number=17, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=1e10000, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='neg_inf_float', full_name='protobuf_unittest.TestExtremeDefaultValues.neg_inf_float', index=19, + number=18, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=-1e10000, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, 
+ options=None), + _descriptor.FieldDescriptor( + name='nan_float', full_name='protobuf_unittest.TestExtremeDefaultValues.nan_float', index=20, + number=19, type=2, cpp_type=6, label=1, + has_default_value=True, default_value=(1e10000 * 0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cpp_trigraph', full_name='protobuf_unittest.TestExtremeDefaultValues.cpp_trigraph', index=21, + number=20, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("? ? ?? ?? ??? ??/ ??-").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.string_with_zero', index=22, + number=23, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("hel\000lo").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bytes_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.bytes_with_zero', index=23, + number=24, type=12, cpp_type=9, label=1, + has_default_value=True, default_value=_b("wor\000ld"), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_piece_with_zero', full_name='protobuf_unittest.TestExtremeDefaultValues.string_piece_with_zero', index=24, + number=25, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("ab\000c").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='cord_with_zero', 
full_name='protobuf_unittest.TestExtremeDefaultValues.cord_with_zero', index=25, + number=26, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("12\0003").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='replacement_string', full_name='protobuf_unittest.TestExtremeDefaultValues.replacement_string', index=26, + number=27, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("${unknown}").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6578, + serialized_end=7528, +) + + +_SPARSEENUMMESSAGE = _descriptor.Descriptor( + name='SparseEnumMessage', + full_name='protobuf_unittest.SparseEnumMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sparse_enum', full_name='protobuf_unittest.SparseEnumMessage.sparse_enum', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=123, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7530, + serialized_end=7605, +) + + +_ONESTRING = _descriptor.Descriptor( + name='OneString', + full_name='protobuf_unittest.OneString', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.OneString.data', index=0, + 
number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7607, + serialized_end=7632, +) + + +_MORESTRING = _descriptor.Descriptor( + name='MoreString', + full_name='protobuf_unittest.MoreString', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.MoreString.data', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7634, + serialized_end=7660, +) + + +_ONEBYTES = _descriptor.Descriptor( + name='OneBytes', + full_name='protobuf_unittest.OneBytes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.OneBytes.data', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7662, + serialized_end=7686, +) + + +_MOREBYTES = _descriptor.Descriptor( + name='MoreBytes', + full_name='protobuf_unittest.MoreBytes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + 
fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.MoreBytes.data', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7688, + serialized_end=7713, +) + + +_INT32MESSAGE = _descriptor.Descriptor( + name='Int32Message', + full_name='protobuf_unittest.Int32Message', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.Int32Message.data', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7715, + serialized_end=7743, +) + + +_UINT32MESSAGE = _descriptor.Descriptor( + name='Uint32Message', + full_name='protobuf_unittest.Uint32Message', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.Uint32Message.data', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7745, + serialized_end=7774, +) + + +_INT64MESSAGE = _descriptor.Descriptor( + 
name='Int64Message', + full_name='protobuf_unittest.Int64Message', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.Int64Message.data', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7776, + serialized_end=7804, +) + + +_UINT64MESSAGE = _descriptor.Descriptor( + name='Uint64Message', + full_name='protobuf_unittest.Uint64Message', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.Uint64Message.data', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=7806, + serialized_end=7835, +) + + +_BOOLMESSAGE = _descriptor.Descriptor( + name='BoolMessage', + full_name='protobuf_unittest.BoolMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='protobuf_unittest.BoolMessage.data', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=7837, + serialized_end=7864, +) + + +_TESTONEOF_FOOGROUP = _descriptor.Descriptor( + name='FooGroup', + full_name='protobuf_unittest.TestOneof.FooGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestOneof.FooGroup.a', index=0, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='b', full_name='protobuf_unittest.TestOneof.FooGroup.b', index=1, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=8036, + serialized_end=8068, +) + +_TESTONEOF = _descriptor.Descriptor( + name='TestOneof', + full_name='protobuf_unittest.TestOneof', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='foo_int', full_name='protobuf_unittest.TestOneof.foo_int', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_string', full_name='protobuf_unittest.TestOneof.foo_string', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='foo_message', full_name='protobuf_unittest.TestOneof.foo_message', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foogroup', full_name='protobuf_unittest.TestOneof.foogroup', index=3, + number=4, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTONEOF_FOOGROUP, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='foo', full_name='protobuf_unittest.TestOneof.foo', + index=0, containing_type=None, fields=[]), + ], + serialized_start=7867, + serialized_end=8075, +) + + +_TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP = _descriptor.Descriptor( + name='FooGroup', + full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup.a', index=0, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='b', full_name='protobuf_unittest.TestOneofBackwardsCompatible.FooGroup.b', index=1, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + 
syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=8036, + serialized_end=8068, +) + +_TESTONEOFBACKWARDSCOMPATIBLE = _descriptor.Descriptor( + name='TestOneofBackwardsCompatible', + full_name='protobuf_unittest.TestOneofBackwardsCompatible', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='foo_int', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_int', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_string', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_string', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_message', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foo_message', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foogroup', full_name='protobuf_unittest.TestOneofBackwardsCompatible.foogroup', index=3, + number=4, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=8078, + serialized_end=8309, +) + + +_TESTONEOF2_FOOGROUP = 
_descriptor.Descriptor( + name='FooGroup', + full_name='protobuf_unittest.TestOneof2.FooGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestOneof2.FooGroup.a', index=0, + number=9, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='b', full_name='protobuf_unittest.TestOneof2.FooGroup.b', index=1, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=8970, + serialized_end=9002, +) + +_TESTONEOF2_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='protobuf_unittest.TestOneof2.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='qux_int', full_name='protobuf_unittest.TestOneof2.NestedMessage.qux_int', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='corge_int', full_name='protobuf_unittest.TestOneof2.NestedMessage.corge_int', index=1, + number=2, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + 
syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9004, + serialized_end=9055, +) + +_TESTONEOF2 = _descriptor.Descriptor( + name='TestOneof2', + full_name='protobuf_unittest.TestOneof2', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='foo_int', full_name='protobuf_unittest.TestOneof2.foo_int', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_string', full_name='protobuf_unittest.TestOneof2.foo_string', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_cord', full_name='protobuf_unittest.TestOneof2.foo_cord', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='foo_string_piece', full_name='protobuf_unittest.TestOneof2.foo_string_piece', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='foo_bytes', full_name='protobuf_unittest.TestOneof2.foo_bytes', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_enum', full_name='protobuf_unittest.TestOneof2.foo_enum', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_message', full_name='protobuf_unittest.TestOneof2.foo_message', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foogroup', full_name='protobuf_unittest.TestOneof2.foogroup', index=7, + number=8, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_lazy_message', full_name='protobuf_unittest.TestOneof2.foo_lazy_message', index=8, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + _descriptor.FieldDescriptor( + name='bar_int', full_name='protobuf_unittest.TestOneof2.bar_int', index=9, + number=12, type=5, cpp_type=1, label=1, + has_default_value=True, default_value=5, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bar_string', full_name='protobuf_unittest.TestOneof2.bar_string', index=10, + number=13, type=9, cpp_type=9, label=1, + has_default_value=True, 
default_value=_b("STRING").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bar_cord', full_name='protobuf_unittest.TestOneof2.bar_cord', index=11, + number=14, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("CORD").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='bar_string_piece', full_name='protobuf_unittest.TestOneof2.bar_string_piece', index=12, + number=15, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("SPIECE").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='bar_bytes', full_name='protobuf_unittest.TestOneof2.bar_bytes', index=13, + number=16, type=12, cpp_type=9, label=1, + has_default_value=True, default_value=_b("BYTES"), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bar_enum', full_name='protobuf_unittest.TestOneof2.bar_enum', index=14, + number=17, type=14, cpp_type=8, label=1, + has_default_value=True, default_value=2, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='baz_int', full_name='protobuf_unittest.TestOneof2.baz_int', index=15, + number=18, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='baz_string', full_name='protobuf_unittest.TestOneof2.baz_string', index=16, + number=19, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("BAZ").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTONEOF2_FOOGROUP, _TESTONEOF2_NESTEDMESSAGE, ], + enum_types=[ + _TESTONEOF2_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='foo', full_name='protobuf_unittest.TestOneof2.foo', + index=0, containing_type=None, fields=[]), + _descriptor.OneofDescriptor( + name='bar', full_name='protobuf_unittest.TestOneof2.bar', + index=1, containing_type=None, fields=[]), + ], + serialized_start=8312, + serialized_end=9110, +) + + +_TESTREQUIREDONEOF_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='protobuf_unittest.TestRequiredOneof.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='required_double', full_name='protobuf_unittest.TestRequiredOneof.NestedMessage.required_double', index=0, + number=1, type=1, cpp_type=5, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9250, + serialized_end=9290, +) + +_TESTREQUIREDONEOF = _descriptor.Descriptor( + name='TestRequiredOneof', + full_name='protobuf_unittest.TestRequiredOneof', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='foo_int', full_name='protobuf_unittest.TestRequiredOneof.foo_int', index=0, + number=1, type=5, 
cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_string', full_name='protobuf_unittest.TestRequiredOneof.foo_string', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='foo_message', full_name='protobuf_unittest.TestRequiredOneof.foo_message', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTREQUIREDONEOF_NESTEDMESSAGE, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='foo', full_name='protobuf_unittest.TestRequiredOneof.foo', + index=0, containing_type=None, fields=[]), + ], + serialized_start=9113, + serialized_end=9297, +) + + +_TESTPACKEDTYPES = _descriptor.Descriptor( + name='TestPackedTypes', + full_name='protobuf_unittest.TestPackedTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='packed_int32', full_name='protobuf_unittest.TestPackedTypes.packed_int32', index=0, + number=90, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_int64', full_name='protobuf_unittest.TestPackedTypes.packed_int64', index=1, + number=91, type=3, 
cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_uint32', full_name='protobuf_unittest.TestPackedTypes.packed_uint32', index=2, + number=92, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_uint64', full_name='protobuf_unittest.TestPackedTypes.packed_uint64', index=3, + number=93, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_sint32', full_name='protobuf_unittest.TestPackedTypes.packed_sint32', index=4, + number=94, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_sint64', full_name='protobuf_unittest.TestPackedTypes.packed_sint64', index=5, + number=95, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_fixed32', full_name='protobuf_unittest.TestPackedTypes.packed_fixed32', index=6, + number=96, 
type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_fixed64', full_name='protobuf_unittest.TestPackedTypes.packed_fixed64', index=7, + number=97, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_sfixed32', full_name='protobuf_unittest.TestPackedTypes.packed_sfixed32', index=8, + number=98, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_sfixed64', full_name='protobuf_unittest.TestPackedTypes.packed_sfixed64', index=9, + number=99, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_float', full_name='protobuf_unittest.TestPackedTypes.packed_float', index=10, + number=100, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_double', full_name='protobuf_unittest.TestPackedTypes.packed_double', index=11, 
+ number=101, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_bool', full_name='protobuf_unittest.TestPackedTypes.packed_bool', index=12, + number=102, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_enum', full_name='protobuf_unittest.TestPackedTypes.packed_enum', index=13, + number=103, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9300, + serialized_end=9726, +) + + +_TESTUNPACKEDTYPES = _descriptor.Descriptor( + name='TestUnpackedTypes', + full_name='protobuf_unittest.TestUnpackedTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='unpacked_int32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_int32', index=0, + number=90, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_int64', 
full_name='protobuf_unittest.TestUnpackedTypes.unpacked_int64', index=1, + number=91, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_uint32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_uint32', index=2, + number=92, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_uint64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_uint64', index=3, + number=93, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_sint32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sint32', index=4, + number=94, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_sint64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sint64', index=5, + number=95, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + 
_descriptor.FieldDescriptor( + name='unpacked_fixed32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_fixed32', index=6, + number=96, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_fixed64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_fixed64', index=7, + number=97, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_sfixed32', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sfixed32', index=8, + number=98, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_sfixed64', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_sfixed64', index=9, + number=99, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_float', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_float', index=10, + number=100, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_double', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_double', index=11, + number=101, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_bool', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_bool', index=12, + number=102, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='unpacked_enum', full_name='protobuf_unittest.TestUnpackedTypes.unpacked_enum', index=13, + number=103, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=9729, + serialized_end=10185, +) + + +_TESTPACKEDEXTENSIONS = _descriptor.Descriptor( + name='TestPackedExtensions', + full_name='protobuf_unittest.TestPackedExtensions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1, 536870912), ], + oneofs=[ + ], + serialized_start=10187, + serialized_end=10219, +) + + +_TESTUNPACKEDEXTENSIONS = 
_descriptor.Descriptor( + name='TestUnpackedExtensions', + full_name='protobuf_unittest.TestUnpackedExtensions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + extension_ranges=[(1, 536870912), ], + oneofs=[ + ], + serialized_start=10221, + serialized_end=10255, +) + + +_TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE = _descriptor.Descriptor( + name='DynamicMessageType', + full_name='protobuf_unittest.TestDynamicExtensions.DynamicMessageType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='dynamic_field', full_name='protobuf_unittest.TestDynamicExtensions.DynamicMessageType.dynamic_field', index=0, + number=2100, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10678, + serialized_end=10722, +) + +_TESTDYNAMICEXTENSIONS = _descriptor.Descriptor( + name='TestDynamicExtensions', + full_name='protobuf_unittest.TestDynamicExtensions', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='scalar_extension', full_name='protobuf_unittest.TestDynamicExtensions.scalar_extension', index=0, + number=2000, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='enum_extension', full_name='protobuf_unittest.TestDynamicExtensions.enum_extension', index=1, + number=2001, type=14, cpp_type=8, label=1, + has_default_value=False, 
default_value=4, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dynamic_enum_extension', full_name='protobuf_unittest.TestDynamicExtensions.dynamic_enum_extension', index=2, + number=2002, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=2200, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_extension', full_name='protobuf_unittest.TestDynamicExtensions.message_extension', index=3, + number=2003, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dynamic_message_extension', full_name='protobuf_unittest.TestDynamicExtensions.dynamic_message_extension', index=4, + number=2004, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_extension', full_name='protobuf_unittest.TestDynamicExtensions.repeated_extension', index=5, + number=2005, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='packed_extension', full_name='protobuf_unittest.TestDynamicExtensions.packed_extension', index=6, + number=2006, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + ], + 
extensions=[ + ], + nested_types=[_TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE, ], + enum_types=[ + _TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE, + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10258, + serialized_end=10795, +) + + +_TESTREPEATEDSCALARDIFFERENTTAGSIZES = _descriptor.Descriptor( + name='TestRepeatedScalarDifferentTagSizes', + full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='repeated_fixed32', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_fixed32', index=0, + number=12, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int32', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_int32', index=1, + number=13, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_fixed64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_fixed64', index=2, + number=2046, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_int64', index=3, + number=2047, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='repeated_float', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_float', index=4, + number=262142, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint64', full_name='protobuf_unittest.TestRepeatedScalarDifferentTagSizes.repeated_uint64', index=5, + number=262143, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=10798, + serialized_end=10990, +) + + +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1 = _descriptor.Descriptor( + name='Group1', + full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1.field1', index=0, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=11783, + serialized_end=11840, +) + +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2 = _descriptor.Descriptor( + name='Group2', + full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2.field1', index=0, + number=21, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=11842, + serialized_end=11899, +) + +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR = _descriptor.Descriptor( + name='RepeatedFieldsGenerator', + full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field1', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='field2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field2', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='field3', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.field3', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='group1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.group1', index=3, + number=10, 
type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='group2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.group2', index=4, + number=20, type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ext1', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.ext1', index=5, + number=1000, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ext2', full_name='protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.ext2', index=6, + number=1001, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1, _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=11345, + serialized_end=11899, +) + +_TESTPARSINGMERGE_OPTIONALGROUP = _descriptor.Descriptor( + name='OptionalGroup', + full_name='protobuf_unittest.TestParsingMerge.OptionalGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_group_all_types', full_name='protobuf_unittest.TestParsingMerge.OptionalGroup.optional_group_all_types', index=0, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, 
default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=11901, + serialized_end=11983, +) + +_TESTPARSINGMERGE_REPEATEDGROUP = _descriptor.Descriptor( + name='RepeatedGroup', + full_name='protobuf_unittest.TestParsingMerge.RepeatedGroup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='repeated_group_all_types', full_name='protobuf_unittest.TestParsingMerge.RepeatedGroup.repeated_group_all_types', index=0, + number=21, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=11985, + serialized_end=12067, +) + +_TESTPARSINGMERGE = _descriptor.Descriptor( + name='TestParsingMerge', + full_name='protobuf_unittest.TestParsingMerge', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='required_all_types', full_name='protobuf_unittest.TestParsingMerge.required_all_types', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_all_types', full_name='protobuf_unittest.TestParsingMerge.optional_all_types', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_all_types', full_name='protobuf_unittest.TestParsingMerge.repeated_all_types', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optionalgroup', full_name='protobuf_unittest.TestParsingMerge.optionalgroup', index=3, + number=10, type=10, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeatedgroup', full_name='protobuf_unittest.TestParsingMerge.repeatedgroup', index=4, + number=20, type=10, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + _descriptor.FieldDescriptor( + name='optional_ext', full_name='protobuf_unittest.TestParsingMerge.optional_ext', index=0, + number=1000, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_ext', full_name='protobuf_unittest.TestParsingMerge.repeated_ext', index=1, + number=1001, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None), + ], + nested_types=[_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR, _TESTPARSINGMERGE_OPTIONALGROUP, _TESTPARSINGMERGE_REPEATEDGROUP, ], + enum_types=[ + ], + options=None, + is_extendable=True, + syntax='proto2', + 
extension_ranges=[(1000, 536870912), ], + oneofs=[ + ], + serialized_start=10993, + serialized_end=12264, +) + + +_TESTCOMMENTINJECTIONMESSAGE = _descriptor.Descriptor( + name='TestCommentInjectionMessage', + full_name='protobuf_unittest.TestCommentInjectionMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='a', full_name='protobuf_unittest.TestCommentInjectionMessage.a', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=True, default_value=_b("*/ <- Neither should this.").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12266, + serialized_end=12334, +) + + +_FOOREQUEST = _descriptor.Descriptor( + name='FooRequest', + full_name='protobuf_unittest.FooRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12336, + serialized_end=12348, +) + + +_FOORESPONSE = _descriptor.Descriptor( + name='FooResponse', + full_name='protobuf_unittest.FooResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12350, + serialized_end=12363, +) + + +_FOOCLIENTMESSAGE = _descriptor.Descriptor( + name='FooClientMessage', + full_name='protobuf_unittest.FooClientMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12365, + serialized_end=12383, +) + + +_FOOSERVERMESSAGE = _descriptor.Descriptor( + name='FooServerMessage', + full_name='protobuf_unittest.FooServerMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12385, + serialized_end=12403, +) + + +_BARREQUEST = _descriptor.Descriptor( + name='BarRequest', + full_name='protobuf_unittest.BarRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12405, + serialized_end=12417, +) + + +_BARRESPONSE = _descriptor.Descriptor( + name='BarResponse', + full_name='protobuf_unittest.BarResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=12419, + serialized_end=12432, +) + +_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES +_TESTALLTYPES_OPTIONALGROUP.containing_type = _TESTALLTYPES +_TESTALLTYPES_REPEATEDGROUP.containing_type = _TESTALLTYPES +_TESTALLTYPES.fields_by_name['optionalgroup'].message_type = _TESTALLTYPES_OPTIONALGROUP +_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGE +_TESTALLTYPES.fields_by_name['optional_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE +_TESTALLTYPES.fields_by_name['optional_nested_enum'].enum_type = 
_TESTALLTYPES_NESTEDENUM +_TESTALLTYPES.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUM +_TESTALLTYPES.fields_by_name['optional_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +_TESTALLTYPES.fields_by_name['optional_public_import_message'].message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE +_TESTALLTYPES.fields_by_name['optional_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['repeatedgroup'].message_type = _TESTALLTYPES_REPEATEDGROUP +_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGE +_TESTALLTYPES.fields_by_name['repeated_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE +_TESTALLTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM +_TESTALLTYPES.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUM +_TESTALLTYPES.fields_by_name['repeated_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +_TESTALLTYPES.fields_by_name['repeated_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['default_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM +_TESTALLTYPES.fields_by_name['default_foreign_enum'].enum_type = _FOREIGNENUM +_TESTALLTYPES.fields_by_name['default_import_enum'].enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +_TESTALLTYPES.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES_NESTEDENUM.containing_type = _TESTALLTYPES +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_uint32']) +_TESTALLTYPES.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] 
+_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_nested_message']) +_TESTALLTYPES.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_string']) +_TESTALLTYPES.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_bytes']) +_TESTALLTYPES.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_NESTEDTESTALLTYPES.fields_by_name['child'].message_type = _NESTEDTESTALLTYPES +_NESTEDTESTALLTYPES.fields_by_name['payload'].message_type = _TESTALLTYPES +_NESTEDTESTALLTYPES.fields_by_name['repeated_child'].message_type = _NESTEDTESTALLTYPES +_TESTREQUIREDFOREIGN.fields_by_name['optional_message'].message_type = _TESTREQUIRED +_TESTREQUIREDFOREIGN.fields_by_name['repeated_message'].message_type = _TESTREQUIRED +_TESTFOREIGNNESTED.fields_by_name['foreign_nested'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTRECURSIVEMESSAGE.fields_by_name['a'].message_type = _TESTRECURSIVEMESSAGE +_TESTMUTUALRECURSIONA.fields_by_name['bb'].message_type = _TESTMUTUALRECURSIONB +_TESTMUTUALRECURSIONB.fields_by_name['a'].message_type = _TESTMUTUALRECURSIONA +_TESTDUPFIELDNUMBER_FOO.containing_type = _TESTDUPFIELDNUMBER +_TESTDUPFIELDNUMBER_BAR.containing_type = _TESTDUPFIELDNUMBER +_TESTDUPFIELDNUMBER.fields_by_name['foo'].message_type = _TESTDUPFIELDNUMBER_FOO +_TESTDUPFIELDNUMBER.fields_by_name['bar'].message_type = _TESTDUPFIELDNUMBER_BAR +_TESTEAGERMESSAGE.fields_by_name['sub_message'].message_type = _TESTALLTYPES +_TESTLAZYMESSAGE.fields_by_name['sub_message'].message_type = _TESTALLTYPES +_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE.fields_by_name['nestedmessage_repeated_foreignmessage'].message_type = _FOREIGNMESSAGE 
+_TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE.containing_type = _TESTNESTEDMESSAGEHASBITS +_TESTNESTEDMESSAGEHASBITS.fields_by_name['optional_nested_message'].message_type = _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE +_TESTCAMELCASEFIELDNAMES.fields_by_name['EnumField'].enum_type = _FOREIGNENUM +_TESTCAMELCASEFIELDNAMES.fields_by_name['MessageField'].message_type = _FOREIGNMESSAGE +_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedEnumField'].enum_type = _FOREIGNENUM +_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedMessageField'].message_type = _FOREIGNMESSAGE +_TESTFIELDORDERINGS_NESTEDMESSAGE.containing_type = _TESTFIELDORDERINGS +_TESTFIELDORDERINGS.fields_by_name['optional_nested_message'].message_type = _TESTFIELDORDERINGS_NESTEDMESSAGE +_SPARSEENUMMESSAGE.fields_by_name['sparse_enum'].enum_type = _TESTSPARSEENUM +_TESTONEOF_FOOGROUP.containing_type = _TESTONEOF +_TESTONEOF.fields_by_name['foo_message'].message_type = _TESTALLTYPES +_TESTONEOF.fields_by_name['foogroup'].message_type = _TESTONEOF_FOOGROUP +_TESTONEOF.oneofs_by_name['foo'].fields.append( + _TESTONEOF.fields_by_name['foo_int']) +_TESTONEOF.fields_by_name['foo_int'].containing_oneof = _TESTONEOF.oneofs_by_name['foo'] +_TESTONEOF.oneofs_by_name['foo'].fields.append( + _TESTONEOF.fields_by_name['foo_string']) +_TESTONEOF.fields_by_name['foo_string'].containing_oneof = _TESTONEOF.oneofs_by_name['foo'] +_TESTONEOF.oneofs_by_name['foo'].fields.append( + _TESTONEOF.fields_by_name['foo_message']) +_TESTONEOF.fields_by_name['foo_message'].containing_oneof = _TESTONEOF.oneofs_by_name['foo'] +_TESTONEOF.oneofs_by_name['foo'].fields.append( + _TESTONEOF.fields_by_name['foogroup']) +_TESTONEOF.fields_by_name['foogroup'].containing_oneof = _TESTONEOF.oneofs_by_name['foo'] +_TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP.containing_type = _TESTONEOFBACKWARDSCOMPATIBLE +_TESTONEOFBACKWARDSCOMPATIBLE.fields_by_name['foo_message'].message_type = _TESTALLTYPES +_TESTONEOFBACKWARDSCOMPATIBLE.fields_by_name['foogroup'].message_type = 
_TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP +_TESTONEOF2_FOOGROUP.containing_type = _TESTONEOF2 +_TESTONEOF2_NESTEDMESSAGE.containing_type = _TESTONEOF2 +_TESTONEOF2.fields_by_name['foo_enum'].enum_type = _TESTONEOF2_NESTEDENUM +_TESTONEOF2.fields_by_name['foo_message'].message_type = _TESTONEOF2_NESTEDMESSAGE +_TESTONEOF2.fields_by_name['foogroup'].message_type = _TESTONEOF2_FOOGROUP +_TESTONEOF2.fields_by_name['foo_lazy_message'].message_type = _TESTONEOF2_NESTEDMESSAGE +_TESTONEOF2.fields_by_name['bar_enum'].enum_type = _TESTONEOF2_NESTEDENUM +_TESTONEOF2_NESTEDENUM.containing_type = _TESTONEOF2 +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + _TESTONEOF2.fields_by_name['foo_int']) +_TESTONEOF2.fields_by_name['foo_int'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + _TESTONEOF2.fields_by_name['foo_string']) +_TESTONEOF2.fields_by_name['foo_string'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + _TESTONEOF2.fields_by_name['foo_cord']) +_TESTONEOF2.fields_by_name['foo_cord'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + _TESTONEOF2.fields_by_name['foo_string_piece']) +_TESTONEOF2.fields_by_name['foo_string_piece'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + _TESTONEOF2.fields_by_name['foo_bytes']) +_TESTONEOF2.fields_by_name['foo_bytes'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + _TESTONEOF2.fields_by_name['foo_enum']) +_TESTONEOF2.fields_by_name['foo_enum'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + _TESTONEOF2.fields_by_name['foo_message']) +_TESTONEOF2.fields_by_name['foo_message'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + 
_TESTONEOF2.fields_by_name['foogroup']) +_TESTONEOF2.fields_by_name['foogroup'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['foo'].fields.append( + _TESTONEOF2.fields_by_name['foo_lazy_message']) +_TESTONEOF2.fields_by_name['foo_lazy_message'].containing_oneof = _TESTONEOF2.oneofs_by_name['foo'] +_TESTONEOF2.oneofs_by_name['bar'].fields.append( + _TESTONEOF2.fields_by_name['bar_int']) +_TESTONEOF2.fields_by_name['bar_int'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] +_TESTONEOF2.oneofs_by_name['bar'].fields.append( + _TESTONEOF2.fields_by_name['bar_string']) +_TESTONEOF2.fields_by_name['bar_string'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] +_TESTONEOF2.oneofs_by_name['bar'].fields.append( + _TESTONEOF2.fields_by_name['bar_cord']) +_TESTONEOF2.fields_by_name['bar_cord'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] +_TESTONEOF2.oneofs_by_name['bar'].fields.append( + _TESTONEOF2.fields_by_name['bar_string_piece']) +_TESTONEOF2.fields_by_name['bar_string_piece'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] +_TESTONEOF2.oneofs_by_name['bar'].fields.append( + _TESTONEOF2.fields_by_name['bar_bytes']) +_TESTONEOF2.fields_by_name['bar_bytes'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] +_TESTONEOF2.oneofs_by_name['bar'].fields.append( + _TESTONEOF2.fields_by_name['bar_enum']) +_TESTONEOF2.fields_by_name['bar_enum'].containing_oneof = _TESTONEOF2.oneofs_by_name['bar'] +_TESTREQUIREDONEOF_NESTEDMESSAGE.containing_type = _TESTREQUIREDONEOF +_TESTREQUIREDONEOF.fields_by_name['foo_message'].message_type = _TESTREQUIREDONEOF_NESTEDMESSAGE +_TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append( + _TESTREQUIREDONEOF.fields_by_name['foo_int']) +_TESTREQUIREDONEOF.fields_by_name['foo_int'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo'] +_TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append( + _TESTREQUIREDONEOF.fields_by_name['foo_string']) 
+_TESTREQUIREDONEOF.fields_by_name['foo_string'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo'] +_TESTREQUIREDONEOF.oneofs_by_name['foo'].fields.append( + _TESTREQUIREDONEOF.fields_by_name['foo_message']) +_TESTREQUIREDONEOF.fields_by_name['foo_message'].containing_oneof = _TESTREQUIREDONEOF.oneofs_by_name['foo'] +_TESTPACKEDTYPES.fields_by_name['packed_enum'].enum_type = _FOREIGNENUM +_TESTUNPACKEDTYPES.fields_by_name['unpacked_enum'].enum_type = _FOREIGNENUM +_TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE.containing_type = _TESTDYNAMICEXTENSIONS +_TESTDYNAMICEXTENSIONS.fields_by_name['enum_extension'].enum_type = _FOREIGNENUM +_TESTDYNAMICEXTENSIONS.fields_by_name['dynamic_enum_extension'].enum_type = _TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE +_TESTDYNAMICEXTENSIONS.fields_by_name['message_extension'].message_type = _FOREIGNMESSAGE +_TESTDYNAMICEXTENSIONS.fields_by_name['dynamic_message_extension'].message_type = _TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE +_TESTDYNAMICEXTENSIONS_DYNAMICENUMTYPE.containing_type = _TESTDYNAMICEXTENSIONS +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1.fields_by_name['field1'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1.containing_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2.fields_by_name['field1'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2.containing_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field1'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field2'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['field3'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['group1'].message_type = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1 +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['group2'].message_type = 
_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2 +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['ext1'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.fields_by_name['ext2'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR.containing_type = _TESTPARSINGMERGE +_TESTPARSINGMERGE_OPTIONALGROUP.fields_by_name['optional_group_all_types'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_OPTIONALGROUP.containing_type = _TESTPARSINGMERGE +_TESTPARSINGMERGE_REPEATEDGROUP.fields_by_name['repeated_group_all_types'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE_REPEATEDGROUP.containing_type = _TESTPARSINGMERGE +_TESTPARSINGMERGE.fields_by_name['required_all_types'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE.fields_by_name['optional_all_types'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE.fields_by_name['repeated_all_types'].message_type = _TESTALLTYPES +_TESTPARSINGMERGE.fields_by_name['optionalgroup'].message_type = _TESTPARSINGMERGE_OPTIONALGROUP +_TESTPARSINGMERGE.fields_by_name['repeatedgroup'].message_type = _TESTPARSINGMERGE_REPEATEDGROUP +DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES +DESCRIPTOR.message_types_by_name['NestedTestAllTypes'] = _NESTEDTESTALLTYPES +DESCRIPTOR.message_types_by_name['TestDeprecatedFields'] = _TESTDEPRECATEDFIELDS +DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE +DESCRIPTOR.message_types_by_name['TestReservedFields'] = _TESTRESERVEDFIELDS +DESCRIPTOR.message_types_by_name['TestAllExtensions'] = _TESTALLEXTENSIONS +DESCRIPTOR.message_types_by_name['OptionalGroup_extension'] = _OPTIONALGROUP_EXTENSION +DESCRIPTOR.message_types_by_name['RepeatedGroup_extension'] = _REPEATEDGROUP_EXTENSION +DESCRIPTOR.message_types_by_name['TestNestedExtension'] = _TESTNESTEDEXTENSION +DESCRIPTOR.message_types_by_name['TestRequired'] = _TESTREQUIRED +DESCRIPTOR.message_types_by_name['TestRequiredForeign'] = _TESTREQUIREDFOREIGN 
+DESCRIPTOR.message_types_by_name['TestForeignNested'] = _TESTFOREIGNNESTED +DESCRIPTOR.message_types_by_name['TestEmptyMessage'] = _TESTEMPTYMESSAGE +DESCRIPTOR.message_types_by_name['TestEmptyMessageWithExtensions'] = _TESTEMPTYMESSAGEWITHEXTENSIONS +DESCRIPTOR.message_types_by_name['TestMultipleExtensionRanges'] = _TESTMULTIPLEEXTENSIONRANGES +DESCRIPTOR.message_types_by_name['TestReallyLargeTagNumber'] = _TESTREALLYLARGETAGNUMBER +DESCRIPTOR.message_types_by_name['TestRecursiveMessage'] = _TESTRECURSIVEMESSAGE +DESCRIPTOR.message_types_by_name['TestMutualRecursionA'] = _TESTMUTUALRECURSIONA +DESCRIPTOR.message_types_by_name['TestMutualRecursionB'] = _TESTMUTUALRECURSIONB +DESCRIPTOR.message_types_by_name['TestDupFieldNumber'] = _TESTDUPFIELDNUMBER +DESCRIPTOR.message_types_by_name['TestEagerMessage'] = _TESTEAGERMESSAGE +DESCRIPTOR.message_types_by_name['TestLazyMessage'] = _TESTLAZYMESSAGE +DESCRIPTOR.message_types_by_name['TestNestedMessageHasBits'] = _TESTNESTEDMESSAGEHASBITS +DESCRIPTOR.message_types_by_name['TestCamelCaseFieldNames'] = _TESTCAMELCASEFIELDNAMES +DESCRIPTOR.message_types_by_name['TestFieldOrderings'] = _TESTFIELDORDERINGS +DESCRIPTOR.message_types_by_name['TestExtremeDefaultValues'] = _TESTEXTREMEDEFAULTVALUES +DESCRIPTOR.message_types_by_name['SparseEnumMessage'] = _SPARSEENUMMESSAGE +DESCRIPTOR.message_types_by_name['OneString'] = _ONESTRING +DESCRIPTOR.message_types_by_name['MoreString'] = _MORESTRING +DESCRIPTOR.message_types_by_name['OneBytes'] = _ONEBYTES +DESCRIPTOR.message_types_by_name['MoreBytes'] = _MOREBYTES +DESCRIPTOR.message_types_by_name['Int32Message'] = _INT32MESSAGE +DESCRIPTOR.message_types_by_name['Uint32Message'] = _UINT32MESSAGE +DESCRIPTOR.message_types_by_name['Int64Message'] = _INT64MESSAGE +DESCRIPTOR.message_types_by_name['Uint64Message'] = _UINT64MESSAGE +DESCRIPTOR.message_types_by_name['BoolMessage'] = _BOOLMESSAGE +DESCRIPTOR.message_types_by_name['TestOneof'] = _TESTONEOF 
+DESCRIPTOR.message_types_by_name['TestOneofBackwardsCompatible'] = _TESTONEOFBACKWARDSCOMPATIBLE +DESCRIPTOR.message_types_by_name['TestOneof2'] = _TESTONEOF2 +DESCRIPTOR.message_types_by_name['TestRequiredOneof'] = _TESTREQUIREDONEOF +DESCRIPTOR.message_types_by_name['TestPackedTypes'] = _TESTPACKEDTYPES +DESCRIPTOR.message_types_by_name['TestUnpackedTypes'] = _TESTUNPACKEDTYPES +DESCRIPTOR.message_types_by_name['TestPackedExtensions'] = _TESTPACKEDEXTENSIONS +DESCRIPTOR.message_types_by_name['TestUnpackedExtensions'] = _TESTUNPACKEDEXTENSIONS +DESCRIPTOR.message_types_by_name['TestDynamicExtensions'] = _TESTDYNAMICEXTENSIONS +DESCRIPTOR.message_types_by_name['TestRepeatedScalarDifferentTagSizes'] = _TESTREPEATEDSCALARDIFFERENTTAGSIZES +DESCRIPTOR.message_types_by_name['TestParsingMerge'] = _TESTPARSINGMERGE +DESCRIPTOR.message_types_by_name['TestCommentInjectionMessage'] = _TESTCOMMENTINJECTIONMESSAGE +DESCRIPTOR.message_types_by_name['FooRequest'] = _FOOREQUEST +DESCRIPTOR.message_types_by_name['FooResponse'] = _FOORESPONSE +DESCRIPTOR.message_types_by_name['FooClientMessage'] = _FOOCLIENTMESSAGE +DESCRIPTOR.message_types_by_name['FooServerMessage'] = _FOOSERVERMESSAGE +DESCRIPTOR.message_types_by_name['BarRequest'] = _BARREQUEST +DESCRIPTOR.message_types_by_name['BarResponse'] = _BARRESPONSE +DESCRIPTOR.enum_types_by_name['ForeignEnum'] = _FOREIGNENUM +DESCRIPTOR.enum_types_by_name['TestEnumWithDupValue'] = _TESTENUMWITHDUPVALUE +DESCRIPTOR.enum_types_by_name['TestSparseEnum'] = _TESTSPARSEENUM +DESCRIPTOR.extensions_by_name['optional_int32_extension'] = optional_int32_extension +DESCRIPTOR.extensions_by_name['optional_int64_extension'] = optional_int64_extension +DESCRIPTOR.extensions_by_name['optional_uint32_extension'] = optional_uint32_extension +DESCRIPTOR.extensions_by_name['optional_uint64_extension'] = optional_uint64_extension +DESCRIPTOR.extensions_by_name['optional_sint32_extension'] = optional_sint32_extension 
+DESCRIPTOR.extensions_by_name['optional_sint64_extension'] = optional_sint64_extension +DESCRIPTOR.extensions_by_name['optional_fixed32_extension'] = optional_fixed32_extension +DESCRIPTOR.extensions_by_name['optional_fixed64_extension'] = optional_fixed64_extension +DESCRIPTOR.extensions_by_name['optional_sfixed32_extension'] = optional_sfixed32_extension +DESCRIPTOR.extensions_by_name['optional_sfixed64_extension'] = optional_sfixed64_extension +DESCRIPTOR.extensions_by_name['optional_float_extension'] = optional_float_extension +DESCRIPTOR.extensions_by_name['optional_double_extension'] = optional_double_extension +DESCRIPTOR.extensions_by_name['optional_bool_extension'] = optional_bool_extension +DESCRIPTOR.extensions_by_name['optional_string_extension'] = optional_string_extension +DESCRIPTOR.extensions_by_name['optional_bytes_extension'] = optional_bytes_extension +DESCRIPTOR.extensions_by_name['optionalgroup_extension'] = optionalgroup_extension +DESCRIPTOR.extensions_by_name['optional_nested_message_extension'] = optional_nested_message_extension +DESCRIPTOR.extensions_by_name['optional_foreign_message_extension'] = optional_foreign_message_extension +DESCRIPTOR.extensions_by_name['optional_import_message_extension'] = optional_import_message_extension +DESCRIPTOR.extensions_by_name['optional_nested_enum_extension'] = optional_nested_enum_extension +DESCRIPTOR.extensions_by_name['optional_foreign_enum_extension'] = optional_foreign_enum_extension +DESCRIPTOR.extensions_by_name['optional_import_enum_extension'] = optional_import_enum_extension +DESCRIPTOR.extensions_by_name['optional_string_piece_extension'] = optional_string_piece_extension +DESCRIPTOR.extensions_by_name['optional_cord_extension'] = optional_cord_extension +DESCRIPTOR.extensions_by_name['optional_public_import_message_extension'] = optional_public_import_message_extension +DESCRIPTOR.extensions_by_name['optional_lazy_message_extension'] = optional_lazy_message_extension 
+DESCRIPTOR.extensions_by_name['repeated_int32_extension'] = repeated_int32_extension +DESCRIPTOR.extensions_by_name['repeated_int64_extension'] = repeated_int64_extension +DESCRIPTOR.extensions_by_name['repeated_uint32_extension'] = repeated_uint32_extension +DESCRIPTOR.extensions_by_name['repeated_uint64_extension'] = repeated_uint64_extension +DESCRIPTOR.extensions_by_name['repeated_sint32_extension'] = repeated_sint32_extension +DESCRIPTOR.extensions_by_name['repeated_sint64_extension'] = repeated_sint64_extension +DESCRIPTOR.extensions_by_name['repeated_fixed32_extension'] = repeated_fixed32_extension +DESCRIPTOR.extensions_by_name['repeated_fixed64_extension'] = repeated_fixed64_extension +DESCRIPTOR.extensions_by_name['repeated_sfixed32_extension'] = repeated_sfixed32_extension +DESCRIPTOR.extensions_by_name['repeated_sfixed64_extension'] = repeated_sfixed64_extension +DESCRIPTOR.extensions_by_name['repeated_float_extension'] = repeated_float_extension +DESCRIPTOR.extensions_by_name['repeated_double_extension'] = repeated_double_extension +DESCRIPTOR.extensions_by_name['repeated_bool_extension'] = repeated_bool_extension +DESCRIPTOR.extensions_by_name['repeated_string_extension'] = repeated_string_extension +DESCRIPTOR.extensions_by_name['repeated_bytes_extension'] = repeated_bytes_extension +DESCRIPTOR.extensions_by_name['repeatedgroup_extension'] = repeatedgroup_extension +DESCRIPTOR.extensions_by_name['repeated_nested_message_extension'] = repeated_nested_message_extension +DESCRIPTOR.extensions_by_name['repeated_foreign_message_extension'] = repeated_foreign_message_extension +DESCRIPTOR.extensions_by_name['repeated_import_message_extension'] = repeated_import_message_extension +DESCRIPTOR.extensions_by_name['repeated_nested_enum_extension'] = repeated_nested_enum_extension +DESCRIPTOR.extensions_by_name['repeated_foreign_enum_extension'] = repeated_foreign_enum_extension +DESCRIPTOR.extensions_by_name['repeated_import_enum_extension'] = 
repeated_import_enum_extension +DESCRIPTOR.extensions_by_name['repeated_string_piece_extension'] = repeated_string_piece_extension +DESCRIPTOR.extensions_by_name['repeated_cord_extension'] = repeated_cord_extension +DESCRIPTOR.extensions_by_name['repeated_lazy_message_extension'] = repeated_lazy_message_extension +DESCRIPTOR.extensions_by_name['default_int32_extension'] = default_int32_extension +DESCRIPTOR.extensions_by_name['default_int64_extension'] = default_int64_extension +DESCRIPTOR.extensions_by_name['default_uint32_extension'] = default_uint32_extension +DESCRIPTOR.extensions_by_name['default_uint64_extension'] = default_uint64_extension +DESCRIPTOR.extensions_by_name['default_sint32_extension'] = default_sint32_extension +DESCRIPTOR.extensions_by_name['default_sint64_extension'] = default_sint64_extension +DESCRIPTOR.extensions_by_name['default_fixed32_extension'] = default_fixed32_extension +DESCRIPTOR.extensions_by_name['default_fixed64_extension'] = default_fixed64_extension +DESCRIPTOR.extensions_by_name['default_sfixed32_extension'] = default_sfixed32_extension +DESCRIPTOR.extensions_by_name['default_sfixed64_extension'] = default_sfixed64_extension +DESCRIPTOR.extensions_by_name['default_float_extension'] = default_float_extension +DESCRIPTOR.extensions_by_name['default_double_extension'] = default_double_extension +DESCRIPTOR.extensions_by_name['default_bool_extension'] = default_bool_extension +DESCRIPTOR.extensions_by_name['default_string_extension'] = default_string_extension +DESCRIPTOR.extensions_by_name['default_bytes_extension'] = default_bytes_extension +DESCRIPTOR.extensions_by_name['default_nested_enum_extension'] = default_nested_enum_extension +DESCRIPTOR.extensions_by_name['default_foreign_enum_extension'] = default_foreign_enum_extension +DESCRIPTOR.extensions_by_name['default_import_enum_extension'] = default_import_enum_extension +DESCRIPTOR.extensions_by_name['default_string_piece_extension'] = default_string_piece_extension 
+DESCRIPTOR.extensions_by_name['default_cord_extension'] = default_cord_extension +DESCRIPTOR.extensions_by_name['oneof_uint32_extension'] = oneof_uint32_extension +DESCRIPTOR.extensions_by_name['oneof_nested_message_extension'] = oneof_nested_message_extension +DESCRIPTOR.extensions_by_name['oneof_string_extension'] = oneof_string_extension +DESCRIPTOR.extensions_by_name['oneof_bytes_extension'] = oneof_bytes_extension +DESCRIPTOR.extensions_by_name['my_extension_string'] = my_extension_string +DESCRIPTOR.extensions_by_name['my_extension_int'] = my_extension_int +DESCRIPTOR.extensions_by_name['packed_int32_extension'] = packed_int32_extension +DESCRIPTOR.extensions_by_name['packed_int64_extension'] = packed_int64_extension +DESCRIPTOR.extensions_by_name['packed_uint32_extension'] = packed_uint32_extension +DESCRIPTOR.extensions_by_name['packed_uint64_extension'] = packed_uint64_extension +DESCRIPTOR.extensions_by_name['packed_sint32_extension'] = packed_sint32_extension +DESCRIPTOR.extensions_by_name['packed_sint64_extension'] = packed_sint64_extension +DESCRIPTOR.extensions_by_name['packed_fixed32_extension'] = packed_fixed32_extension +DESCRIPTOR.extensions_by_name['packed_fixed64_extension'] = packed_fixed64_extension +DESCRIPTOR.extensions_by_name['packed_sfixed32_extension'] = packed_sfixed32_extension +DESCRIPTOR.extensions_by_name['packed_sfixed64_extension'] = packed_sfixed64_extension +DESCRIPTOR.extensions_by_name['packed_float_extension'] = packed_float_extension +DESCRIPTOR.extensions_by_name['packed_double_extension'] = packed_double_extension +DESCRIPTOR.extensions_by_name['packed_bool_extension'] = packed_bool_extension +DESCRIPTOR.extensions_by_name['packed_enum_extension'] = packed_enum_extension +DESCRIPTOR.extensions_by_name['unpacked_int32_extension'] = unpacked_int32_extension +DESCRIPTOR.extensions_by_name['unpacked_int64_extension'] = unpacked_int64_extension +DESCRIPTOR.extensions_by_name['unpacked_uint32_extension'] = 
unpacked_uint32_extension +DESCRIPTOR.extensions_by_name['unpacked_uint64_extension'] = unpacked_uint64_extension +DESCRIPTOR.extensions_by_name['unpacked_sint32_extension'] = unpacked_sint32_extension +DESCRIPTOR.extensions_by_name['unpacked_sint64_extension'] = unpacked_sint64_extension +DESCRIPTOR.extensions_by_name['unpacked_fixed32_extension'] = unpacked_fixed32_extension +DESCRIPTOR.extensions_by_name['unpacked_fixed64_extension'] = unpacked_fixed64_extension +DESCRIPTOR.extensions_by_name['unpacked_sfixed32_extension'] = unpacked_sfixed32_extension +DESCRIPTOR.extensions_by_name['unpacked_sfixed64_extension'] = unpacked_sfixed64_extension +DESCRIPTOR.extensions_by_name['unpacked_float_extension'] = unpacked_float_extension +DESCRIPTOR.extensions_by_name['unpacked_double_extension'] = unpacked_double_extension +DESCRIPTOR.extensions_by_name['unpacked_bool_extension'] = unpacked_bool_extension +DESCRIPTOR.extensions_by_name['unpacked_enum_extension'] = unpacked_enum_extension + +TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.NestedMessage) + )) + , + + OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict( + DESCRIPTOR = _TESTALLTYPES_OPTIONALGROUP, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.OptionalGroup) + )) + , + + RepeatedGroup = _reflection.GeneratedProtocolMessageType('RepeatedGroup', (_message.Message,), dict( + DESCRIPTOR = _TESTALLTYPES_REPEATEDGROUP, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes.RepeatedGroup) + )) + , + DESCRIPTOR = 
_TESTALLTYPES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllTypes) + )) +_sym_db.RegisterMessage(TestAllTypes) +_sym_db.RegisterMessage(TestAllTypes.NestedMessage) +_sym_db.RegisterMessage(TestAllTypes.OptionalGroup) +_sym_db.RegisterMessage(TestAllTypes.RepeatedGroup) + +NestedTestAllTypes = _reflection.GeneratedProtocolMessageType('NestedTestAllTypes', (_message.Message,), dict( + DESCRIPTOR = _NESTEDTESTALLTYPES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.NestedTestAllTypes) + )) +_sym_db.RegisterMessage(NestedTestAllTypes) + +TestDeprecatedFields = _reflection.GeneratedProtocolMessageType('TestDeprecatedFields', (_message.Message,), dict( + DESCRIPTOR = _TESTDEPRECATEDFIELDS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDeprecatedFields) + )) +_sym_db.RegisterMessage(TestDeprecatedFields) + +ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict( + DESCRIPTOR = _FOREIGNMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.ForeignMessage) + )) +_sym_db.RegisterMessage(ForeignMessage) + +TestReservedFields = _reflection.GeneratedProtocolMessageType('TestReservedFields', (_message.Message,), dict( + DESCRIPTOR = _TESTRESERVEDFIELDS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestReservedFields) + )) +_sym_db.RegisterMessage(TestReservedFields) + +TestAllExtensions = _reflection.GeneratedProtocolMessageType('TestAllExtensions', (_message.Message,), dict( + DESCRIPTOR = _TESTALLEXTENSIONS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestAllExtensions) + )) +_sym_db.RegisterMessage(TestAllExtensions) + +OptionalGroup_extension = 
_reflection.GeneratedProtocolMessageType('OptionalGroup_extension', (_message.Message,), dict( + DESCRIPTOR = _OPTIONALGROUP_EXTENSION, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.OptionalGroup_extension) + )) +_sym_db.RegisterMessage(OptionalGroup_extension) + +RepeatedGroup_extension = _reflection.GeneratedProtocolMessageType('RepeatedGroup_extension', (_message.Message,), dict( + DESCRIPTOR = _REPEATEDGROUP_EXTENSION, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.RepeatedGroup_extension) + )) +_sym_db.RegisterMessage(RepeatedGroup_extension) + +TestNestedExtension = _reflection.GeneratedProtocolMessageType('TestNestedExtension', (_message.Message,), dict( + DESCRIPTOR = _TESTNESTEDEXTENSION, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedExtension) + )) +_sym_db.RegisterMessage(TestNestedExtension) + +TestRequired = _reflection.GeneratedProtocolMessageType('TestRequired', (_message.Message,), dict( + DESCRIPTOR = _TESTREQUIRED, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequired) + )) +_sym_db.RegisterMessage(TestRequired) + +TestRequiredForeign = _reflection.GeneratedProtocolMessageType('TestRequiredForeign', (_message.Message,), dict( + DESCRIPTOR = _TESTREQUIREDFOREIGN, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredForeign) + )) +_sym_db.RegisterMessage(TestRequiredForeign) + +TestForeignNested = _reflection.GeneratedProtocolMessageType('TestForeignNested', (_message.Message,), dict( + DESCRIPTOR = _TESTFOREIGNNESTED, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestForeignNested) + )) +_sym_db.RegisterMessage(TestForeignNested) + +TestEmptyMessage = 
_reflection.GeneratedProtocolMessageType('TestEmptyMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTEMPTYMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEmptyMessage) + )) +_sym_db.RegisterMessage(TestEmptyMessage) + +TestEmptyMessageWithExtensions = _reflection.GeneratedProtocolMessageType('TestEmptyMessageWithExtensions', (_message.Message,), dict( + DESCRIPTOR = _TESTEMPTYMESSAGEWITHEXTENSIONS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEmptyMessageWithExtensions) + )) +_sym_db.RegisterMessage(TestEmptyMessageWithExtensions) + +TestMultipleExtensionRanges = _reflection.GeneratedProtocolMessageType('TestMultipleExtensionRanges', (_message.Message,), dict( + DESCRIPTOR = _TESTMULTIPLEEXTENSIONRANGES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMultipleExtensionRanges) + )) +_sym_db.RegisterMessage(TestMultipleExtensionRanges) + +TestReallyLargeTagNumber = _reflection.GeneratedProtocolMessageType('TestReallyLargeTagNumber', (_message.Message,), dict( + DESCRIPTOR = _TESTREALLYLARGETAGNUMBER, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestReallyLargeTagNumber) + )) +_sym_db.RegisterMessage(TestReallyLargeTagNumber) + +TestRecursiveMessage = _reflection.GeneratedProtocolMessageType('TestRecursiveMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTRECURSIVEMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursiveMessage) + )) +_sym_db.RegisterMessage(TestRecursiveMessage) + +TestMutualRecursionA = _reflection.GeneratedProtocolMessageType('TestMutualRecursionA', (_message.Message,), dict( + DESCRIPTOR = _TESTMUTUALRECURSIONA, + __module__ = 'google.protobuf.unittest_pb2' + # 
@@protoc_insertion_point(class_scope:protobuf_unittest.TestMutualRecursionA) + )) +_sym_db.RegisterMessage(TestMutualRecursionA) + +TestMutualRecursionB = _reflection.GeneratedProtocolMessageType('TestMutualRecursionB', (_message.Message,), dict( + DESCRIPTOR = _TESTMUTUALRECURSIONB, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMutualRecursionB) + )) +_sym_db.RegisterMessage(TestMutualRecursionB) + +TestDupFieldNumber = _reflection.GeneratedProtocolMessageType('TestDupFieldNumber', (_message.Message,), dict( + + Foo = _reflection.GeneratedProtocolMessageType('Foo', (_message.Message,), dict( + DESCRIPTOR = _TESTDUPFIELDNUMBER_FOO, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber.Foo) + )) + , + + Bar = _reflection.GeneratedProtocolMessageType('Bar', (_message.Message,), dict( + DESCRIPTOR = _TESTDUPFIELDNUMBER_BAR, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber.Bar) + )) + , + DESCRIPTOR = _TESTDUPFIELDNUMBER, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDupFieldNumber) + )) +_sym_db.RegisterMessage(TestDupFieldNumber) +_sym_db.RegisterMessage(TestDupFieldNumber.Foo) +_sym_db.RegisterMessage(TestDupFieldNumber.Bar) + +TestEagerMessage = _reflection.GeneratedProtocolMessageType('TestEagerMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTEAGERMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestEagerMessage) + )) +_sym_db.RegisterMessage(TestEagerMessage) + +TestLazyMessage = _reflection.GeneratedProtocolMessageType('TestLazyMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTLAZYMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestLazyMessage) + )) 
+_sym_db.RegisterMessage(TestLazyMessage) + +TestNestedMessageHasBits = _reflection.GeneratedProtocolMessageType('TestNestedMessageHasBits', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTNESTEDMESSAGEHASBITS_NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedMessageHasBits.NestedMessage) + )) + , + DESCRIPTOR = _TESTNESTEDMESSAGEHASBITS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNestedMessageHasBits) + )) +_sym_db.RegisterMessage(TestNestedMessageHasBits) +_sym_db.RegisterMessage(TestNestedMessageHasBits.NestedMessage) + +TestCamelCaseFieldNames = _reflection.GeneratedProtocolMessageType('TestCamelCaseFieldNames', (_message.Message,), dict( + DESCRIPTOR = _TESTCAMELCASEFIELDNAMES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestCamelCaseFieldNames) + )) +_sym_db.RegisterMessage(TestCamelCaseFieldNames) + +TestFieldOrderings = _reflection.GeneratedProtocolMessageType('TestFieldOrderings', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTFIELDORDERINGS_NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestFieldOrderings.NestedMessage) + )) + , + DESCRIPTOR = _TESTFIELDORDERINGS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestFieldOrderings) + )) +_sym_db.RegisterMessage(TestFieldOrderings) +_sym_db.RegisterMessage(TestFieldOrderings.NestedMessage) + +TestExtremeDefaultValues = _reflection.GeneratedProtocolMessageType('TestExtremeDefaultValues', (_message.Message,), dict( + DESCRIPTOR = _TESTEXTREMEDEFAULTVALUES, + __module__ = 
'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestExtremeDefaultValues) + )) +_sym_db.RegisterMessage(TestExtremeDefaultValues) + +SparseEnumMessage = _reflection.GeneratedProtocolMessageType('SparseEnumMessage', (_message.Message,), dict( + DESCRIPTOR = _SPARSEENUMMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.SparseEnumMessage) + )) +_sym_db.RegisterMessage(SparseEnumMessage) + +OneString = _reflection.GeneratedProtocolMessageType('OneString', (_message.Message,), dict( + DESCRIPTOR = _ONESTRING, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.OneString) + )) +_sym_db.RegisterMessage(OneString) + +MoreString = _reflection.GeneratedProtocolMessageType('MoreString', (_message.Message,), dict( + DESCRIPTOR = _MORESTRING, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.MoreString) + )) +_sym_db.RegisterMessage(MoreString) + +OneBytes = _reflection.GeneratedProtocolMessageType('OneBytes', (_message.Message,), dict( + DESCRIPTOR = _ONEBYTES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.OneBytes) + )) +_sym_db.RegisterMessage(OneBytes) + +MoreBytes = _reflection.GeneratedProtocolMessageType('MoreBytes', (_message.Message,), dict( + DESCRIPTOR = _MOREBYTES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.MoreBytes) + )) +_sym_db.RegisterMessage(MoreBytes) + +Int32Message = _reflection.GeneratedProtocolMessageType('Int32Message', (_message.Message,), dict( + DESCRIPTOR = _INT32MESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.Int32Message) + )) +_sym_db.RegisterMessage(Int32Message) + +Uint32Message = _reflection.GeneratedProtocolMessageType('Uint32Message', 
(_message.Message,), dict( + DESCRIPTOR = _UINT32MESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.Uint32Message) + )) +_sym_db.RegisterMessage(Uint32Message) + +Int64Message = _reflection.GeneratedProtocolMessageType('Int64Message', (_message.Message,), dict( + DESCRIPTOR = _INT64MESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.Int64Message) + )) +_sym_db.RegisterMessage(Int64Message) + +Uint64Message = _reflection.GeneratedProtocolMessageType('Uint64Message', (_message.Message,), dict( + DESCRIPTOR = _UINT64MESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.Uint64Message) + )) +_sym_db.RegisterMessage(Uint64Message) + +BoolMessage = _reflection.GeneratedProtocolMessageType('BoolMessage', (_message.Message,), dict( + DESCRIPTOR = _BOOLMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.BoolMessage) + )) +_sym_db.RegisterMessage(BoolMessage) + +TestOneof = _reflection.GeneratedProtocolMessageType('TestOneof', (_message.Message,), dict( + + FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict( + DESCRIPTOR = _TESTONEOF_FOOGROUP, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof.FooGroup) + )) + , + DESCRIPTOR = _TESTONEOF, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof) + )) +_sym_db.RegisterMessage(TestOneof) +_sym_db.RegisterMessage(TestOneof.FooGroup) + +TestOneofBackwardsCompatible = _reflection.GeneratedProtocolMessageType('TestOneofBackwardsCompatible', (_message.Message,), dict( + + FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict( + DESCRIPTOR = _TESTONEOFBACKWARDSCOMPATIBLE_FOOGROUP, + __module__ = 
'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneofBackwardsCompatible.FooGroup) + )) + , + DESCRIPTOR = _TESTONEOFBACKWARDSCOMPATIBLE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneofBackwardsCompatible) + )) +_sym_db.RegisterMessage(TestOneofBackwardsCompatible) +_sym_db.RegisterMessage(TestOneofBackwardsCompatible.FooGroup) + +TestOneof2 = _reflection.GeneratedProtocolMessageType('TestOneof2', (_message.Message,), dict( + + FooGroup = _reflection.GeneratedProtocolMessageType('FooGroup', (_message.Message,), dict( + DESCRIPTOR = _TESTONEOF2_FOOGROUP, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2.FooGroup) + )) + , + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTONEOF2_NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2.NestedMessage) + )) + , + DESCRIPTOR = _TESTONEOF2, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestOneof2) + )) +_sym_db.RegisterMessage(TestOneof2) +_sym_db.RegisterMessage(TestOneof2.FooGroup) +_sym_db.RegisterMessage(TestOneof2.NestedMessage) + +TestRequiredOneof = _reflection.GeneratedProtocolMessageType('TestRequiredOneof', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTREQUIREDONEOF_NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredOneof.NestedMessage) + )) + , + DESCRIPTOR = _TESTREQUIREDONEOF, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRequiredOneof) + )) +_sym_db.RegisterMessage(TestRequiredOneof) 
+_sym_db.RegisterMessage(TestRequiredOneof.NestedMessage) + +TestPackedTypes = _reflection.GeneratedProtocolMessageType('TestPackedTypes', (_message.Message,), dict( + DESCRIPTOR = _TESTPACKEDTYPES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestPackedTypes) + )) +_sym_db.RegisterMessage(TestPackedTypes) + +TestUnpackedTypes = _reflection.GeneratedProtocolMessageType('TestUnpackedTypes', (_message.Message,), dict( + DESCRIPTOR = _TESTUNPACKEDTYPES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestUnpackedTypes) + )) +_sym_db.RegisterMessage(TestUnpackedTypes) + +TestPackedExtensions = _reflection.GeneratedProtocolMessageType('TestPackedExtensions', (_message.Message,), dict( + DESCRIPTOR = _TESTPACKEDEXTENSIONS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestPackedExtensions) + )) +_sym_db.RegisterMessage(TestPackedExtensions) + +TestUnpackedExtensions = _reflection.GeneratedProtocolMessageType('TestUnpackedExtensions', (_message.Message,), dict( + DESCRIPTOR = _TESTUNPACKEDEXTENSIONS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestUnpackedExtensions) + )) +_sym_db.RegisterMessage(TestUnpackedExtensions) + +TestDynamicExtensions = _reflection.GeneratedProtocolMessageType('TestDynamicExtensions', (_message.Message,), dict( + + DynamicMessageType = _reflection.GeneratedProtocolMessageType('DynamicMessageType', (_message.Message,), dict( + DESCRIPTOR = _TESTDYNAMICEXTENSIONS_DYNAMICMESSAGETYPE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDynamicExtensions.DynamicMessageType) + )) + , + DESCRIPTOR = _TESTDYNAMICEXTENSIONS, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDynamicExtensions) + )) 
+_sym_db.RegisterMessage(TestDynamicExtensions) +_sym_db.RegisterMessage(TestDynamicExtensions.DynamicMessageType) + +TestRepeatedScalarDifferentTagSizes = _reflection.GeneratedProtocolMessageType('TestRepeatedScalarDifferentTagSizes', (_message.Message,), dict( + DESCRIPTOR = _TESTREPEATEDSCALARDIFFERENTTAGSIZES, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRepeatedScalarDifferentTagSizes) + )) +_sym_db.RegisterMessage(TestRepeatedScalarDifferentTagSizes) + +TestParsingMerge = _reflection.GeneratedProtocolMessageType('TestParsingMerge', (_message.Message,), dict( + + RepeatedFieldsGenerator = _reflection.GeneratedProtocolMessageType('RepeatedFieldsGenerator', (_message.Message,), dict( + + Group1 = _reflection.GeneratedProtocolMessageType('Group1', (_message.Message,), dict( + DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP1, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group1) + )) + , + + Group2 = _reflection.GeneratedProtocolMessageType('Group2', (_message.Message,), dict( + DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR_GROUP2, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator.Group2) + )) + , + DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDFIELDSGENERATOR, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedFieldsGenerator) + )) + , + + OptionalGroup = _reflection.GeneratedProtocolMessageType('OptionalGroup', (_message.Message,), dict( + DESCRIPTOR = _TESTPARSINGMERGE_OPTIONALGROUP, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.OptionalGroup) + )) + , + + RepeatedGroup = _reflection.GeneratedProtocolMessageType('RepeatedGroup', 
(_message.Message,), dict( + DESCRIPTOR = _TESTPARSINGMERGE_REPEATEDGROUP, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge.RepeatedGroup) + )) + , + DESCRIPTOR = _TESTPARSINGMERGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestParsingMerge) + )) +_sym_db.RegisterMessage(TestParsingMerge) +_sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator) +_sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator.Group1) +_sym_db.RegisterMessage(TestParsingMerge.RepeatedFieldsGenerator.Group2) +_sym_db.RegisterMessage(TestParsingMerge.OptionalGroup) +_sym_db.RegisterMessage(TestParsingMerge.RepeatedGroup) + +TestCommentInjectionMessage = _reflection.GeneratedProtocolMessageType('TestCommentInjectionMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTCOMMENTINJECTIONMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.TestCommentInjectionMessage) + )) +_sym_db.RegisterMessage(TestCommentInjectionMessage) + +FooRequest = _reflection.GeneratedProtocolMessageType('FooRequest', (_message.Message,), dict( + DESCRIPTOR = _FOOREQUEST, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.FooRequest) + )) +_sym_db.RegisterMessage(FooRequest) + +FooResponse = _reflection.GeneratedProtocolMessageType('FooResponse', (_message.Message,), dict( + DESCRIPTOR = _FOORESPONSE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.FooResponse) + )) +_sym_db.RegisterMessage(FooResponse) + +FooClientMessage = _reflection.GeneratedProtocolMessageType('FooClientMessage', (_message.Message,), dict( + DESCRIPTOR = _FOOCLIENTMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.FooClientMessage) + )) 
+_sym_db.RegisterMessage(FooClientMessage) + +FooServerMessage = _reflection.GeneratedProtocolMessageType('FooServerMessage', (_message.Message,), dict( + DESCRIPTOR = _FOOSERVERMESSAGE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.FooServerMessage) + )) +_sym_db.RegisterMessage(FooServerMessage) + +BarRequest = _reflection.GeneratedProtocolMessageType('BarRequest', (_message.Message,), dict( + DESCRIPTOR = _BARREQUEST, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.BarRequest) + )) +_sym_db.RegisterMessage(BarRequest) + +BarResponse = _reflection.GeneratedProtocolMessageType('BarResponse', (_message.Message,), dict( + DESCRIPTOR = _BARRESPONSE, + __module__ = 'google.protobuf.unittest_pb2' + # @@protoc_insertion_point(class_scope:protobuf_unittest.BarResponse) + )) +_sym_db.RegisterMessage(BarResponse) + +TestAllExtensions.RegisterExtension(optional_int32_extension) +TestAllExtensions.RegisterExtension(optional_int64_extension) +TestAllExtensions.RegisterExtension(optional_uint32_extension) +TestAllExtensions.RegisterExtension(optional_uint64_extension) +TestAllExtensions.RegisterExtension(optional_sint32_extension) +TestAllExtensions.RegisterExtension(optional_sint64_extension) +TestAllExtensions.RegisterExtension(optional_fixed32_extension) +TestAllExtensions.RegisterExtension(optional_fixed64_extension) +TestAllExtensions.RegisterExtension(optional_sfixed32_extension) +TestAllExtensions.RegisterExtension(optional_sfixed64_extension) +TestAllExtensions.RegisterExtension(optional_float_extension) +TestAllExtensions.RegisterExtension(optional_double_extension) +TestAllExtensions.RegisterExtension(optional_bool_extension) +TestAllExtensions.RegisterExtension(optional_string_extension) +TestAllExtensions.RegisterExtension(optional_bytes_extension) +optionalgroup_extension.message_type = _OPTIONALGROUP_EXTENSION 
+TestAllExtensions.RegisterExtension(optionalgroup_extension) +optional_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE +TestAllExtensions.RegisterExtension(optional_nested_message_extension) +optional_foreign_message_extension.message_type = _FOREIGNMESSAGE +TestAllExtensions.RegisterExtension(optional_foreign_message_extension) +optional_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE +TestAllExtensions.RegisterExtension(optional_import_message_extension) +optional_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM +TestAllExtensions.RegisterExtension(optional_nested_enum_extension) +optional_foreign_enum_extension.enum_type = _FOREIGNENUM +TestAllExtensions.RegisterExtension(optional_foreign_enum_extension) +optional_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +TestAllExtensions.RegisterExtension(optional_import_enum_extension) +TestAllExtensions.RegisterExtension(optional_string_piece_extension) +TestAllExtensions.RegisterExtension(optional_cord_extension) +optional_public_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE +TestAllExtensions.RegisterExtension(optional_public_import_message_extension) +optional_lazy_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE +TestAllExtensions.RegisterExtension(optional_lazy_message_extension) +TestAllExtensions.RegisterExtension(repeated_int32_extension) +TestAllExtensions.RegisterExtension(repeated_int64_extension) +TestAllExtensions.RegisterExtension(repeated_uint32_extension) +TestAllExtensions.RegisterExtension(repeated_uint64_extension) +TestAllExtensions.RegisterExtension(repeated_sint32_extension) +TestAllExtensions.RegisterExtension(repeated_sint64_extension) +TestAllExtensions.RegisterExtension(repeated_fixed32_extension) +TestAllExtensions.RegisterExtension(repeated_fixed64_extension) 
+TestAllExtensions.RegisterExtension(repeated_sfixed32_extension) +TestAllExtensions.RegisterExtension(repeated_sfixed64_extension) +TestAllExtensions.RegisterExtension(repeated_float_extension) +TestAllExtensions.RegisterExtension(repeated_double_extension) +TestAllExtensions.RegisterExtension(repeated_bool_extension) +TestAllExtensions.RegisterExtension(repeated_string_extension) +TestAllExtensions.RegisterExtension(repeated_bytes_extension) +repeatedgroup_extension.message_type = _REPEATEDGROUP_EXTENSION +TestAllExtensions.RegisterExtension(repeatedgroup_extension) +repeated_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE +TestAllExtensions.RegisterExtension(repeated_nested_message_extension) +repeated_foreign_message_extension.message_type = _FOREIGNMESSAGE +TestAllExtensions.RegisterExtension(repeated_foreign_message_extension) +repeated_import_message_extension.message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE +TestAllExtensions.RegisterExtension(repeated_import_message_extension) +repeated_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM +TestAllExtensions.RegisterExtension(repeated_nested_enum_extension) +repeated_foreign_enum_extension.enum_type = _FOREIGNENUM +TestAllExtensions.RegisterExtension(repeated_foreign_enum_extension) +repeated_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +TestAllExtensions.RegisterExtension(repeated_import_enum_extension) +TestAllExtensions.RegisterExtension(repeated_string_piece_extension) +TestAllExtensions.RegisterExtension(repeated_cord_extension) +repeated_lazy_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE +TestAllExtensions.RegisterExtension(repeated_lazy_message_extension) +TestAllExtensions.RegisterExtension(default_int32_extension) +TestAllExtensions.RegisterExtension(default_int64_extension) +TestAllExtensions.RegisterExtension(default_uint32_extension) 
+TestAllExtensions.RegisterExtension(default_uint64_extension) +TestAllExtensions.RegisterExtension(default_sint32_extension) +TestAllExtensions.RegisterExtension(default_sint64_extension) +TestAllExtensions.RegisterExtension(default_fixed32_extension) +TestAllExtensions.RegisterExtension(default_fixed64_extension) +TestAllExtensions.RegisterExtension(default_sfixed32_extension) +TestAllExtensions.RegisterExtension(default_sfixed64_extension) +TestAllExtensions.RegisterExtension(default_float_extension) +TestAllExtensions.RegisterExtension(default_double_extension) +TestAllExtensions.RegisterExtension(default_bool_extension) +TestAllExtensions.RegisterExtension(default_string_extension) +TestAllExtensions.RegisterExtension(default_bytes_extension) +default_nested_enum_extension.enum_type = _TESTALLTYPES_NESTEDENUM +TestAllExtensions.RegisterExtension(default_nested_enum_extension) +default_foreign_enum_extension.enum_type = _FOREIGNENUM +TestAllExtensions.RegisterExtension(default_foreign_enum_extension) +default_import_enum_extension.enum_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTENUM +TestAllExtensions.RegisterExtension(default_import_enum_extension) +TestAllExtensions.RegisterExtension(default_string_piece_extension) +TestAllExtensions.RegisterExtension(default_cord_extension) +TestAllExtensions.RegisterExtension(oneof_uint32_extension) +oneof_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE +TestAllExtensions.RegisterExtension(oneof_nested_message_extension) +TestAllExtensions.RegisterExtension(oneof_string_extension) +TestAllExtensions.RegisterExtension(oneof_bytes_extension) +TestFieldOrderings.RegisterExtension(my_extension_string) +TestFieldOrderings.RegisterExtension(my_extension_int) +TestPackedExtensions.RegisterExtension(packed_int32_extension) +TestPackedExtensions.RegisterExtension(packed_int64_extension) +TestPackedExtensions.RegisterExtension(packed_uint32_extension) 
+TestPackedExtensions.RegisterExtension(packed_uint64_extension) +TestPackedExtensions.RegisterExtension(packed_sint32_extension) +TestPackedExtensions.RegisterExtension(packed_sint64_extension) +TestPackedExtensions.RegisterExtension(packed_fixed32_extension) +TestPackedExtensions.RegisterExtension(packed_fixed64_extension) +TestPackedExtensions.RegisterExtension(packed_sfixed32_extension) +TestPackedExtensions.RegisterExtension(packed_sfixed64_extension) +TestPackedExtensions.RegisterExtension(packed_float_extension) +TestPackedExtensions.RegisterExtension(packed_double_extension) +TestPackedExtensions.RegisterExtension(packed_bool_extension) +packed_enum_extension.enum_type = _FOREIGNENUM +TestPackedExtensions.RegisterExtension(packed_enum_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_int32_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_int64_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_uint32_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_uint64_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_sint32_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_sint64_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_fixed32_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_fixed64_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_sfixed32_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_sfixed64_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_float_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_double_extension) +TestUnpackedExtensions.RegisterExtension(unpacked_bool_extension) +unpacked_enum_extension.enum_type = _FOREIGNENUM +TestUnpackedExtensions.RegisterExtension(unpacked_enum_extension) +TestAllExtensions.RegisterExtension(_TESTNESTEDEXTENSION.extensions_by_name['test']) +TestAllExtensions.RegisterExtension(_TESTNESTEDEXTENSION.extensions_by_name['nested_string_extension']) 
+_TESTREQUIRED.extensions_by_name['single'].message_type = _TESTREQUIRED +TestAllExtensions.RegisterExtension(_TESTREQUIRED.extensions_by_name['single']) +_TESTREQUIRED.extensions_by_name['multi'].message_type = _TESTREQUIRED +TestAllExtensions.RegisterExtension(_TESTREQUIRED.extensions_by_name['multi']) +_TESTPARSINGMERGE.extensions_by_name['optional_ext'].message_type = _TESTALLTYPES +TestParsingMerge.RegisterExtension(_TESTPARSINGMERGE.extensions_by_name['optional_ext']) +_TESTPARSINGMERGE.extensions_by_name['repeated_ext'].message_type = _TESTALLTYPES +TestParsingMerge.RegisterExtension(_TESTPARSINGMERGE.extensions_by_name['repeated_ext']) + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('B\rUnittestProtoH\001\200\001\001\210\001\001\220\001\001\370\001\001')) +_TESTENUMWITHDUPVALUE.has_options = True +_TESTENUMWITHDUPVALUE._options = _descriptor._ParseOptions(descriptor_pb2.EnumOptions(), _b('\020\001')) +optional_string_piece_extension.has_options = True +optional_string_piece_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +optional_cord_extension.has_options = True +optional_cord_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +optional_lazy_message_extension.has_options = True +optional_lazy_message_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +repeated_string_piece_extension.has_options = True +repeated_string_piece_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +repeated_cord_extension.has_options = True +repeated_cord_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +repeated_lazy_message_extension.has_options = True +repeated_lazy_message_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) 
+default_string_piece_extension.has_options = True +default_string_piece_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +default_cord_extension.has_options = True +default_cord_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +packed_int32_extension.has_options = True +packed_int32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_int64_extension.has_options = True +packed_int64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_uint32_extension.has_options = True +packed_uint32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_uint64_extension.has_options = True +packed_uint64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_sint32_extension.has_options = True +packed_sint32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_sint64_extension.has_options = True +packed_sint64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_fixed32_extension.has_options = True +packed_fixed32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_fixed64_extension.has_options = True +packed_fixed64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_sfixed32_extension.has_options = True +packed_sfixed32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_sfixed64_extension.has_options = True +packed_sfixed64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_float_extension.has_options = True +packed_float_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), 
_b('\020\001')) +packed_double_extension.has_options = True +packed_double_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_bool_extension.has_options = True +packed_bool_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +packed_enum_extension.has_options = True +packed_enum_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +unpacked_int32_extension.has_options = True +unpacked_int32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_int64_extension.has_options = True +unpacked_int64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_uint32_extension.has_options = True +unpacked_uint32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_uint64_extension.has_options = True +unpacked_uint64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_sint32_extension.has_options = True +unpacked_sint32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_sint64_extension.has_options = True +unpacked_sint64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_fixed32_extension.has_options = True +unpacked_fixed32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_fixed64_extension.has_options = True +unpacked_fixed64_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_sfixed32_extension.has_options = True +unpacked_sfixed32_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_sfixed64_extension.has_options = True +unpacked_sfixed64_extension._options = 
_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_float_extension.has_options = True +unpacked_float_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_double_extension.has_options = True +unpacked_double_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_bool_extension.has_options = True +unpacked_bool_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +unpacked_enum_extension.has_options = True +unpacked_enum_extension._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTALLTYPES.fields_by_name['optional_string_piece'].has_options = True +_TESTALLTYPES.fields_by_name['optional_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTALLTYPES.fields_by_name['optional_cord'].has_options = True +_TESTALLTYPES.fields_by_name['optional_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTALLTYPES.fields_by_name['optional_lazy_message'].has_options = True +_TESTALLTYPES.fields_by_name['optional_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +_TESTALLTYPES.fields_by_name['repeated_string_piece'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTALLTYPES.fields_by_name['repeated_cord'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTALLTYPES.fields_by_name['repeated_lazy_message'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +_TESTALLTYPES.fields_by_name['default_string_piece'].has_options = 
True +_TESTALLTYPES.fields_by_name['default_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTALLTYPES.fields_by_name['default_cord'].has_options = True +_TESTALLTYPES.fields_by_name['default_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTDEPRECATEDFIELDS.fields_by_name['deprecated_int32'].has_options = True +_TESTDEPRECATEDFIELDS.fields_by_name['deprecated_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\030\001')) +_TESTEAGERMESSAGE.fields_by_name['sub_message'].has_options = True +_TESTEAGERMESSAGE.fields_by_name['sub_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\000')) +_TESTLAZYMESSAGE.fields_by_name['sub_message'].has_options = True +_TESTLAZYMESSAGE.fields_by_name['sub_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +_TESTCAMELCASEFIELDNAMES.fields_by_name['StringPieceField'].has_options = True +_TESTCAMELCASEFIELDNAMES.fields_by_name['StringPieceField']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTCAMELCASEFIELDNAMES.fields_by_name['CordField'].has_options = True +_TESTCAMELCASEFIELDNAMES.fields_by_name['CordField']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedStringPieceField'].has_options = True +_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedStringPieceField']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedCordField'].has_options = True +_TESTCAMELCASEFIELDNAMES.fields_by_name['RepeatedCordField']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTEXTREMEDEFAULTVALUES.fields_by_name['string_piece_with_zero'].has_options = True 
+_TESTEXTREMEDEFAULTVALUES.fields_by_name['string_piece_with_zero']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTEXTREMEDEFAULTVALUES.fields_by_name['cord_with_zero'].has_options = True +_TESTEXTREMEDEFAULTVALUES.fields_by_name['cord_with_zero']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTONEOF2.fields_by_name['foo_cord'].has_options = True +_TESTONEOF2.fields_by_name['foo_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTONEOF2.fields_by_name['foo_string_piece'].has_options = True +_TESTONEOF2.fields_by_name['foo_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTONEOF2.fields_by_name['foo_lazy_message'].has_options = True +_TESTONEOF2.fields_by_name['foo_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +_TESTONEOF2.fields_by_name['bar_cord'].has_options = True +_TESTONEOF2.fields_by_name['bar_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTONEOF2.fields_by_name['bar_string_piece'].has_options = True +_TESTONEOF2.fields_by_name['bar_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTPACKEDTYPES.fields_by_name['packed_int32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_int64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_uint32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 
+_TESTPACKEDTYPES.fields_by_name['packed_uint64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_sint32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_sint64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_fixed32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_fixed64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_sfixed32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_sfixed64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_float'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_double'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_bool'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_bool']._options = 
_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_enum'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_int32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_int64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_uint32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_uint64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_sint32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_sint64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) 
+_TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_float'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_double'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_bool'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['unpacked_enum'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['unpacked_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTDYNAMICEXTENSIONS.fields_by_name['packed_extension'].has_options = True +_TESTDYNAMICEXTENSIONS.fields_by_name['packed_extension']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) + +_TESTSERVICE = _descriptor.ServiceDescriptor( + name='TestService', + full_name='protobuf_unittest.TestService', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=12718, + serialized_end=12871, + methods=[ + _descriptor.MethodDescriptor( + name='Foo', + full_name='protobuf_unittest.TestService.Foo', + index=0, + containing_service=None, + input_type=_FOOREQUEST, + output_type=_FOORESPONSE, + options=None, + ), + _descriptor.MethodDescriptor( + name='Bar', + 
full_name='protobuf_unittest.TestService.Bar', + index=1, + containing_service=None, + input_type=_BARREQUEST, + output_type=_BARRESPONSE, + options=None, + ), +]) + +TestService = service_reflection.GeneratedServiceType('TestService', (_service.Service,), dict( + DESCRIPTOR = _TESTSERVICE, + __module__ = 'google.protobuf.unittest_pb2' + )) + +TestService_Stub = service_reflection.GeneratedServiceStubType('TestService_Stub', (TestService,), dict( + DESCRIPTOR = _TESTSERVICE, + __module__ = 'google.protobuf.unittest_pb2' + )) + + +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/unittest_proto3_arena_pb2.py b/deps/google/protobuf/unittest_proto3_arena_pb2.py new file mode 100644 index 00000000..5dcc3e7e --- /dev/null +++ b/deps/google/protobuf/unittest_proto3_arena_pb2.py @@ -0,0 +1,1014 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/unittest_proto3_arena.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import unittest_import_pb2 as google_dot_protobuf_dot_unittest__import__pb2 +google_dot_protobuf_dot_unittest__import__public__pb2 = google_dot_protobuf_dot_unittest__import__pb2.google_dot_protobuf_dot_unittest__import__public__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/unittest_proto3_arena.proto', + package='proto3_arena_unittest', + syntax='proto3', + 
serialized_pb=_b('\n+google/protobuf/unittest_proto3_arena.proto\x12\x15proto3_arena_unittest\x1a%google/protobuf/unittest_import.proto\"\xf6\x10\n\x0cTestAllTypes\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05\x12\x16\n\x0eoptional_int64\x18\x02 \x01(\x03\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x17\n\x0foptional_uint64\x18\x04 \x01(\x04\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_sint64\x18\x06 \x01(\x12\x12\x18\n\x10optional_fixed32\x18\x07 \x01(\x07\x12\x18\n\x10optional_fixed64\x18\x08 \x01(\x06\x12\x19\n\x11optional_sfixed32\x18\t \x01(\x0f\x12\x19\n\x11optional_sfixed64\x18\n \x01(\x10\x12\x16\n\x0eoptional_float\x18\x0b \x01(\x02\x12\x17\n\x0foptional_double\x18\x0c \x01(\x01\x12\x15\n\roptional_bool\x18\r \x01(\x08\x12\x17\n\x0foptional_string\x18\x0e \x01(\t\x12\x16\n\x0eoptional_bytes\x18\x0f \x01(\x0c\x12R\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessage\x12G\n\x18optional_foreign_message\x18\x13 \x01(\x0b\x32%.proto3_arena_unittest.ForeignMessage\x12H\n\x17optional_import_message\x18\x14 \x01(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12L\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32..proto3_arena_unittest.TestAllTypes.NestedEnum\x12\x41\n\x15optional_foreign_enum\x18\x16 \x01(\x0e\x32\".proto3_arena_unittest.ForeignEnum\x12!\n\x15optional_string_piece\x18\x18 \x01(\tB\x02\x08\x02\x12\x19\n\roptional_cord\x18\x19 \x01(\tB\x02\x08\x01\x12U\n\x1eoptional_public_import_message\x18\x1a \x01(\x0b\x32-.protobuf_unittest_import.PublicImportMessage\x12T\n\x15optional_lazy_message\x18\x1b \x01(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0erepeated_int32\x18\x1f \x03(\x05\x12\x16\n\x0erepeated_int64\x18 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18! 
\x03(\r\x12\x17\n\x0frepeated_uint64\x18\" \x03(\x04\x12\x17\n\x0frepeated_sint32\x18# \x03(\x11\x12\x17\n\x0frepeated_sint64\x18$ \x03(\x12\x12\x18\n\x10repeated_fixed32\x18% \x03(\x07\x12\x18\n\x10repeated_fixed64\x18& \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\' \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18( \x03(\x10\x12\x16\n\x0erepeated_float\x18) \x03(\x02\x12\x17\n\x0frepeated_double\x18* \x03(\x01\x12\x15\n\rrepeated_bool\x18+ \x03(\x08\x12\x17\n\x0frepeated_string\x18, \x03(\t\x12\x16\n\x0erepeated_bytes\x18- \x03(\x0c\x12R\n\x17repeated_nested_message\x18\x30 \x03(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessage\x12G\n\x18repeated_foreign_message\x18\x31 \x03(\x0b\x32%.proto3_arena_unittest.ForeignMessage\x12H\n\x17repeated_import_message\x18\x32 \x03(\x0b\x32\'.protobuf_unittest_import.ImportMessage\x12L\n\x14repeated_nested_enum\x18\x33 \x03(\x0e\x32..proto3_arena_unittest.TestAllTypes.NestedEnum\x12\x41\n\x15repeated_foreign_enum\x18\x34 \x03(\x0e\x32\".proto3_arena_unittest.ForeignEnum\x12!\n\x15repeated_string_piece\x18\x36 \x03(\tB\x02\x08\x02\x12\x19\n\rrepeated_cord\x18\x37 \x03(\tB\x02\x08\x01\x12T\n\x15repeated_lazy_message\x18\x39 \x03(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessageB\x02(\x01\x12\x16\n\x0coneof_uint32\x18o \x01(\rH\x00\x12Q\n\x14oneof_nested_message\x18p \x01(\x0b\x32\x31.proto3_arena_unittest.TestAllTypes.NestedMessageH\x00\x12\x16\n\x0coneof_string\x18q \x01(\tH\x00\x12\x15\n\x0boneof_bytes\x18r \x01(\x0cH\x00\x1a\x1b\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\"C\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\r\n\x0boneof_field\"\xae\x03\n\x0fTestPackedTypes\x12\x18\n\x0cpacked_int32\x18Z \x03(\x05\x42\x02\x10\x01\x12\x18\n\x0cpacked_int64\x18[ \x03(\x03\x42\x02\x10\x01\x12\x19\n\rpacked_uint32\x18\\ 
\x03(\rB\x02\x10\x01\x12\x19\n\rpacked_uint64\x18] \x03(\x04\x42\x02\x10\x01\x12\x19\n\rpacked_sint32\x18^ \x03(\x11\x42\x02\x10\x01\x12\x19\n\rpacked_sint64\x18_ \x03(\x12\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed32\x18` \x03(\x07\x42\x02\x10\x01\x12\x1a\n\x0epacked_fixed64\x18\x61 \x03(\x06\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed32\x18\x62 \x03(\x0f\x42\x02\x10\x01\x12\x1b\n\x0fpacked_sfixed64\x18\x63 \x03(\x10\x42\x02\x10\x01\x12\x18\n\x0cpacked_float\x18\x64 \x03(\x02\x42\x02\x10\x01\x12\x19\n\rpacked_double\x18\x65 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0bpacked_bool\x18\x66 \x03(\x08\x42\x02\x10\x01\x12;\n\x0bpacked_enum\x18g \x03(\x0e\x32\".proto3_arena_unittest.ForeignEnumB\x02\x10\x01\"\xdf\x03\n\x11TestUnpackedTypes\x12\x1a\n\x0erepeated_int32\x18\x01 \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0erepeated_int64\x18\x02 \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0frepeated_uint32\x18\x03 \x03(\rB\x02\x10\x00\x12\x1b\n\x0frepeated_uint64\x18\x04 \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint32\x18\x05 \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint64\x18\x06 \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed32\x18\x07 \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed64\x18\x08 \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed32\x18\t \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed64\x18\n \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0erepeated_float\x18\x0b \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0frepeated_double\x18\x0c \x03(\x01\x42\x02\x10\x00\x12\x19\n\rrepeated_bool\x18\r \x03(\x08\x42\x02\x10\x00\x12P\n\x14repeated_nested_enum\x18\x0e \x03(\x0e\x32..proto3_arena_unittest.TestAllTypes.NestedEnumB\x02\x10\x00\"\x84\x01\n\x12NestedTestAllTypes\x12\x38\n\x05\x63hild\x18\x01 \x01(\x0b\x32).proto3_arena_unittest.NestedTestAllTypes\x12\x34\n\x07payload\x18\x02 \x01(\x0b\x32#.proto3_arena_unittest.TestAllTypes\"\x1b\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 
\x01(\x05\"\x12\n\x10TestEmptyMessage*R\n\x0b\x46oreignEnum\x12\x10\n\x0c\x46OREIGN_ZERO\x10\x00\x12\x0f\n\x0b\x46OREIGN_FOO\x10\x04\x12\x0f\n\x0b\x46OREIGN_BAR\x10\x05\x12\x0f\n\x0b\x46OREIGN_BAZ\x10\x06\x42\x03\xf8\x01\x01\x62\x06proto3') + , + dependencies=[google_dot_protobuf_dot_unittest__import__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_FOREIGNENUM = _descriptor.EnumDescriptor( + name='ForeignEnum', + full_name='proto3_arena_unittest.ForeignEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOREIGN_ZERO', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN_FOO', index=1, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN_BAR', index=2, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN_BAZ', index=3, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3377, + serialized_end=3459, +) +_sym_db.RegisterEnumDescriptor(_FOREIGNENUM) + +ForeignEnum = enum_type_wrapper.EnumTypeWrapper(_FOREIGNENUM) +FOREIGN_ZERO = 0 +FOREIGN_FOO = 4 +FOREIGN_BAR = 5 +FOREIGN_BAZ = 6 + + +_TESTALLTYPES_NESTEDENUM = _descriptor.EnumDescriptor( + name='NestedEnum', + full_name='proto3_arena_unittest.TestAllTypes.NestedEnum', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ZERO', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOO', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAR', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAZ', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEG', index=4, number=-1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2194, + 
serialized_end=2261, +) +_sym_db.RegisterEnumDescriptor(_TESTALLTYPES_NESTEDENUM) + + +_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor( + name='NestedMessage', + full_name='proto3_arena_unittest.TestAllTypes.NestedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bb', full_name='proto3_arena_unittest.TestAllTypes.NestedMessage.bb', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2165, + serialized_end=2192, +) + +_TESTALLTYPES = _descriptor.Descriptor( + name='TestAllTypes', + full_name='proto3_arena_unittest.TestAllTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='optional_int32', full_name='proto3_arena_unittest.TestAllTypes.optional_int32', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_int64', full_name='proto3_arena_unittest.TestAllTypes.optional_int64', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_uint32', full_name='proto3_arena_unittest.TestAllTypes.optional_uint32', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_uint64', full_name='proto3_arena_unittest.TestAllTypes.optional_uint64', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sint32', full_name='proto3_arena_unittest.TestAllTypes.optional_sint32', index=4, + number=5, type=17, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sint64', full_name='proto3_arena_unittest.TestAllTypes.optional_sint64', index=5, + number=6, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_fixed32', full_name='proto3_arena_unittest.TestAllTypes.optional_fixed32', index=6, + number=7, type=7, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_fixed64', full_name='proto3_arena_unittest.TestAllTypes.optional_fixed64', index=7, + number=8, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sfixed32', full_name='proto3_arena_unittest.TestAllTypes.optional_sfixed32', index=8, + number=9, type=15, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, 
+ is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_sfixed64', full_name='proto3_arena_unittest.TestAllTypes.optional_sfixed64', index=9, + number=10, type=16, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_float', full_name='proto3_arena_unittest.TestAllTypes.optional_float', index=10, + number=11, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_double', full_name='proto3_arena_unittest.TestAllTypes.optional_double', index=11, + number=12, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_bool', full_name='proto3_arena_unittest.TestAllTypes.optional_bool', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_string', full_name='proto3_arena_unittest.TestAllTypes.optional_string', index=13, + number=14, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_bytes', full_name='proto3_arena_unittest.TestAllTypes.optional_bytes', index=14, + number=15, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_nested_message', full_name='proto3_arena_unittest.TestAllTypes.optional_nested_message', index=15, + number=18, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_foreign_message', full_name='proto3_arena_unittest.TestAllTypes.optional_foreign_message', index=16, + number=19, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_import_message', full_name='proto3_arena_unittest.TestAllTypes.optional_import_message', index=17, + number=20, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_nested_enum', full_name='proto3_arena_unittest.TestAllTypes.optional_nested_enum', index=18, + number=21, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_foreign_enum', full_name='proto3_arena_unittest.TestAllTypes.optional_foreign_enum', index=19, + number=22, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_string_piece', 
full_name='proto3_arena_unittest.TestAllTypes.optional_string_piece', index=20, + number=24, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='optional_cord', full_name='proto3_arena_unittest.TestAllTypes.optional_cord', index=21, + number=25, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='optional_public_import_message', full_name='proto3_arena_unittest.TestAllTypes.optional_public_import_message', index=22, + number=26, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='optional_lazy_message', full_name='proto3_arena_unittest.TestAllTypes.optional_lazy_message', index=23, + number=27, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + _descriptor.FieldDescriptor( + name='repeated_int32', full_name='proto3_arena_unittest.TestAllTypes.repeated_int32', index=24, + number=31, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int64', 
full_name='proto3_arena_unittest.TestAllTypes.repeated_int64', index=25, + number=32, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint32', full_name='proto3_arena_unittest.TestAllTypes.repeated_uint32', index=26, + number=33, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint64', full_name='proto3_arena_unittest.TestAllTypes.repeated_uint64', index=27, + number=34, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sint32', full_name='proto3_arena_unittest.TestAllTypes.repeated_sint32', index=28, + number=35, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sint64', full_name='proto3_arena_unittest.TestAllTypes.repeated_sint64', index=29, + number=36, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_fixed32', full_name='proto3_arena_unittest.TestAllTypes.repeated_fixed32', index=30, + number=37, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='repeated_fixed64', full_name='proto3_arena_unittest.TestAllTypes.repeated_fixed64', index=31, + number=38, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sfixed32', full_name='proto3_arena_unittest.TestAllTypes.repeated_sfixed32', index=32, + number=39, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_sfixed64', full_name='proto3_arena_unittest.TestAllTypes.repeated_sfixed64', index=33, + number=40, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_float', full_name='proto3_arena_unittest.TestAllTypes.repeated_float', index=34, + number=41, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_double', full_name='proto3_arena_unittest.TestAllTypes.repeated_double', index=35, + number=42, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bool', full_name='proto3_arena_unittest.TestAllTypes.repeated_bool', index=36, + number=43, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_string', full_name='proto3_arena_unittest.TestAllTypes.repeated_string', index=37, + number=44, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bytes', full_name='proto3_arena_unittest.TestAllTypes.repeated_bytes', index=38, + number=45, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_message', full_name='proto3_arena_unittest.TestAllTypes.repeated_nested_message', index=39, + number=48, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_foreign_message', full_name='proto3_arena_unittest.TestAllTypes.repeated_foreign_message', index=40, + number=49, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_import_message', full_name='proto3_arena_unittest.TestAllTypes.repeated_import_message', index=41, + number=50, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_nested_enum', full_name='proto3_arena_unittest.TestAllTypes.repeated_nested_enum', index=42, + number=51, type=14, cpp_type=8, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_foreign_enum', full_name='proto3_arena_unittest.TestAllTypes.repeated_foreign_enum', index=43, + number=52, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_string_piece', full_name='proto3_arena_unittest.TestAllTypes.repeated_string_piece', index=44, + number=54, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002'))), + _descriptor.FieldDescriptor( + name='repeated_cord', full_name='proto3_arena_unittest.TestAllTypes.repeated_cord', index=45, + number=55, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001'))), + _descriptor.FieldDescriptor( + name='repeated_lazy_message', full_name='proto3_arena_unittest.TestAllTypes.repeated_lazy_message', index=46, + number=57, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001'))), + _descriptor.FieldDescriptor( + name='oneof_uint32', full_name='proto3_arena_unittest.TestAllTypes.oneof_uint32', index=47, + number=111, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_nested_message', full_name='proto3_arena_unittest.TestAllTypes.oneof_nested_message', index=48, + number=112, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_string', full_name='proto3_arena_unittest.TestAllTypes.oneof_string', index=49, + number=113, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_bytes', full_name='proto3_arena_unittest.TestAllTypes.oneof_bytes', index=50, + number=114, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTALLTYPES_NESTEDMESSAGE, ], + enum_types=[ + _TESTALLTYPES_NESTEDENUM, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='oneof_field', full_name='proto3_arena_unittest.TestAllTypes.oneof_field', + index=0, containing_type=None, fields=[]), + ], + serialized_start=110, + serialized_end=2276, +) + + +_TESTPACKEDTYPES = _descriptor.Descriptor( + name='TestPackedTypes', + full_name='proto3_arena_unittest.TestPackedTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='packed_int32', full_name='proto3_arena_unittest.TestPackedTypes.packed_int32', index=0, + number=90, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_int64', full_name='proto3_arena_unittest.TestPackedTypes.packed_int64', index=1, + number=91, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_uint32', full_name='proto3_arena_unittest.TestPackedTypes.packed_uint32', index=2, + number=92, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_uint64', full_name='proto3_arena_unittest.TestPackedTypes.packed_uint64', index=3, + number=93, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_sint32', full_name='proto3_arena_unittest.TestPackedTypes.packed_sint32', index=4, + number=94, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_sint64', full_name='proto3_arena_unittest.TestPackedTypes.packed_sint64', index=5, + number=95, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_fixed32', full_name='proto3_arena_unittest.TestPackedTypes.packed_fixed32', index=6, + number=96, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_fixed64', full_name='proto3_arena_unittest.TestPackedTypes.packed_fixed64', index=7, + number=97, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_sfixed32', full_name='proto3_arena_unittest.TestPackedTypes.packed_sfixed32', index=8, + number=98, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_sfixed64', full_name='proto3_arena_unittest.TestPackedTypes.packed_sfixed64', index=9, + number=99, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_float', full_name='proto3_arena_unittest.TestPackedTypes.packed_float', index=10, + number=100, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_double', full_name='proto3_arena_unittest.TestPackedTypes.packed_double', index=11, + number=101, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_bool', full_name='proto3_arena_unittest.TestPackedTypes.packed_bool', index=12, + number=102, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + _descriptor.FieldDescriptor( + name='packed_enum', full_name='proto3_arena_unittest.TestPackedTypes.packed_enum', index=13, + number=103, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2279, + serialized_end=2709, +) + + +_TESTUNPACKEDTYPES = _descriptor.Descriptor( + name='TestUnpackedTypes', + full_name='proto3_arena_unittest.TestUnpackedTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='repeated_int32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_int32', index=0, + number=1, type=5, cpp_type=1, label=3, + 
has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_int64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_int64', index=1, + number=2, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_uint32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_uint32', index=2, + number=3, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_uint64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_uint64', index=3, + number=4, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_sint32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_sint32', index=4, + number=5, type=17, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_sint64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_sint64', 
index=5, + number=6, type=18, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_fixed32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_fixed32', index=6, + number=7, type=7, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_fixed64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_fixed64', index=7, + number=8, type=6, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_sfixed32', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_sfixed32', index=8, + number=9, type=15, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_sfixed64', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_sfixed64', index=9, + number=10, type=16, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_float', 
full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_float', index=10, + number=11, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_double', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_double', index=11, + number=12, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_bool', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_bool', index=12, + number=13, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + _descriptor.FieldDescriptor( + name='repeated_nested_enum', full_name='proto3_arena_unittest.TestUnpackedTypes.repeated_nested_enum', index=13, + number=14, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000'))), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2712, + serialized_end=3191, +) + + +_NESTEDTESTALLTYPES = _descriptor.Descriptor( + name='NestedTestAllTypes', + full_name='proto3_arena_unittest.NestedTestAllTypes', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='child', full_name='proto3_arena_unittest.NestedTestAllTypes.child', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='payload', full_name='proto3_arena_unittest.NestedTestAllTypes.payload', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3194, + serialized_end=3326, +) + + +_FOREIGNMESSAGE = _descriptor.Descriptor( + name='ForeignMessage', + full_name='proto3_arena_unittest.ForeignMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='c', full_name='proto3_arena_unittest.ForeignMessage.c', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3328, + serialized_end=3355, +) + + +_TESTEMPTYMESSAGE = _descriptor.Descriptor( + name='TestEmptyMessage', + full_name='proto3_arena_unittest.TestEmptyMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=3357, + serialized_end=3375, +) + +_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES +_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['optional_foreign_message'].message_type = _FOREIGNMESSAGE +_TESTALLTYPES.fields_by_name['optional_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE +_TESTALLTYPES.fields_by_name['optional_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM +_TESTALLTYPES.fields_by_name['optional_foreign_enum'].enum_type = _FOREIGNENUM +_TESTALLTYPES.fields_by_name['optional_public_import_message'].message_type = google_dot_protobuf_dot_unittest__import__public__pb2._PUBLICIMPORTMESSAGE +_TESTALLTYPES.fields_by_name['optional_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['repeated_foreign_message'].message_type = _FOREIGNMESSAGE +_TESTALLTYPES.fields_by_name['repeated_import_message'].message_type = google_dot_protobuf_dot_unittest__import__pb2._IMPORTMESSAGE +_TESTALLTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM +_TESTALLTYPES.fields_by_name['repeated_foreign_enum'].enum_type = _FOREIGNENUM +_TESTALLTYPES.fields_by_name['repeated_lazy_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES.fields_by_name['oneof_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE +_TESTALLTYPES_NESTEDENUM.containing_type = _TESTALLTYPES +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_uint32']) +_TESTALLTYPES.fields_by_name['oneof_uint32'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_nested_message']) 
+_TESTALLTYPES.fields_by_name['oneof_nested_message'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_string']) +_TESTALLTYPES.fields_by_name['oneof_string'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTALLTYPES.oneofs_by_name['oneof_field'].fields.append( + _TESTALLTYPES.fields_by_name['oneof_bytes']) +_TESTALLTYPES.fields_by_name['oneof_bytes'].containing_oneof = _TESTALLTYPES.oneofs_by_name['oneof_field'] +_TESTPACKEDTYPES.fields_by_name['packed_enum'].enum_type = _FOREIGNENUM +_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTALLTYPES_NESTEDENUM +_NESTEDTESTALLTYPES.fields_by_name['child'].message_type = _NESTEDTESTALLTYPES +_NESTEDTESTALLTYPES.fields_by_name['payload'].message_type = _TESTALLTYPES +DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES +DESCRIPTOR.message_types_by_name['TestPackedTypes'] = _TESTPACKEDTYPES +DESCRIPTOR.message_types_by_name['TestUnpackedTypes'] = _TESTUNPACKEDTYPES +DESCRIPTOR.message_types_by_name['NestedTestAllTypes'] = _NESTEDTESTALLTYPES +DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE +DESCRIPTOR.message_types_by_name['TestEmptyMessage'] = _TESTEMPTYMESSAGE +DESCRIPTOR.enum_types_by_name['ForeignEnum'] = _FOREIGNENUM + +TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), dict( + + NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTALLTYPES_NESTEDMESSAGE, + __module__ = 'google.protobuf.unittest_proto3_arena_pb2' + # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestAllTypes.NestedMessage) + )) + , + DESCRIPTOR = _TESTALLTYPES, + __module__ = 'google.protobuf.unittest_proto3_arena_pb2' + # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestAllTypes) + )) +_sym_db.RegisterMessage(TestAllTypes) 
+_sym_db.RegisterMessage(TestAllTypes.NestedMessage) + +TestPackedTypes = _reflection.GeneratedProtocolMessageType('TestPackedTypes', (_message.Message,), dict( + DESCRIPTOR = _TESTPACKEDTYPES, + __module__ = 'google.protobuf.unittest_proto3_arena_pb2' + # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestPackedTypes) + )) +_sym_db.RegisterMessage(TestPackedTypes) + +TestUnpackedTypes = _reflection.GeneratedProtocolMessageType('TestUnpackedTypes', (_message.Message,), dict( + DESCRIPTOR = _TESTUNPACKEDTYPES, + __module__ = 'google.protobuf.unittest_proto3_arena_pb2' + # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestUnpackedTypes) + )) +_sym_db.RegisterMessage(TestUnpackedTypes) + +NestedTestAllTypes = _reflection.GeneratedProtocolMessageType('NestedTestAllTypes', (_message.Message,), dict( + DESCRIPTOR = _NESTEDTESTALLTYPES, + __module__ = 'google.protobuf.unittest_proto3_arena_pb2' + # @@protoc_insertion_point(class_scope:proto3_arena_unittest.NestedTestAllTypes) + )) +_sym_db.RegisterMessage(NestedTestAllTypes) + +ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), dict( + DESCRIPTOR = _FOREIGNMESSAGE, + __module__ = 'google.protobuf.unittest_proto3_arena_pb2' + # @@protoc_insertion_point(class_scope:proto3_arena_unittest.ForeignMessage) + )) +_sym_db.RegisterMessage(ForeignMessage) + +TestEmptyMessage = _reflection.GeneratedProtocolMessageType('TestEmptyMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTEMPTYMESSAGE, + __module__ = 'google.protobuf.unittest_proto3_arena_pb2' + # @@protoc_insertion_point(class_scope:proto3_arena_unittest.TestEmptyMessage) + )) +_sym_db.RegisterMessage(TestEmptyMessage) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001')) +_TESTALLTYPES.fields_by_name['optional_string_piece'].has_options = True +_TESTALLTYPES.fields_by_name['optional_string_piece']._options = 
_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTALLTYPES.fields_by_name['optional_cord'].has_options = True +_TESTALLTYPES.fields_by_name['optional_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTALLTYPES.fields_by_name['optional_lazy_message'].has_options = True +_TESTALLTYPES.fields_by_name['optional_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +_TESTALLTYPES.fields_by_name['repeated_string_piece'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_string_piece']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\002')) +_TESTALLTYPES.fields_by_name['repeated_cord'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_cord']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\010\001')) +_TESTALLTYPES.fields_by_name['repeated_lazy_message'].has_options = True +_TESTALLTYPES.fields_by_name['repeated_lazy_message']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('(\001')) +_TESTPACKEDTYPES.fields_by_name['packed_int32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_int64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_uint32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_uint64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_sint32'].has_options = True 
+_TESTPACKEDTYPES.fields_by_name['packed_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_sint64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_fixed32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_fixed64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_sfixed32'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_sfixed64'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_float'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_double'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_bool'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) +_TESTPACKEDTYPES.fields_by_name['packed_enum'].has_options = True +_TESTPACKEDTYPES.fields_by_name['packed_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001')) 
+_TESTUNPACKEDTYPES.fields_by_name['repeated_int32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_int32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_int64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_int64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64'].has_options = True 
+_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_float'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_float']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_double'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_double']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_bool'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_bool']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].has_options = True +_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\000')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/util/__init__.py b/deps/google/protobuf/util/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/deps/google/protobuf/util/__pycache__/__init__.cpython-34.pyc b/deps/google/protobuf/util/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..11786b90 Binary files /dev/null and b/deps/google/protobuf/util/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/google/protobuf/util/__pycache__/json_format_proto3_pb2.cpython-34.pyc b/deps/google/protobuf/util/__pycache__/json_format_proto3_pb2.cpython-34.pyc new file mode 100644 index 00000000..490ee4ad Binary files /dev/null and b/deps/google/protobuf/util/__pycache__/json_format_proto3_pb2.cpython-34.pyc differ diff --git a/deps/google/protobuf/util/json_format_proto3_pb2.py b/deps/google/protobuf/util/json_format_proto3_pb2.py new file mode 100644 index 00000000..f2acc493 --- /dev/null +++ 
b/deps/google/protobuf/util/json_format_proto3_pb2.py @@ -0,0 +1,1811 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/protobuf/util/json_format_proto3.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/util/json_format_proto3.proto', + package='proto3', + syntax='proto3', + serialized_pb=_b('\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x19google/protobuf/any.proto\x1a google/protobuf/field_mask.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 
\x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\xd4\x01\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 \x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 
\x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 
\x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 \x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 
\x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x62\x06proto3') + , + dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_ENUMTYPE = _descriptor.EnumDescriptor( + name='EnumType', + full_name='proto3.EnumType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FOO', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BAR', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=4425, + serialized_end=4453, +) +_sym_db.RegisterEnumDescriptor(_ENUMTYPE) + +EnumType = enum_type_wrapper.EnumTypeWrapper(_ENUMTYPE) +FOO = 0 +BAR = 1 + + + +_MESSAGETYPE = _descriptor.Descriptor( + name='MessageType', + full_name='proto3.MessageType', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.MessageType.value', index=0, + number=1, type=5, cpp_type=1, 
label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=245, + serialized_end=273, +) + + +_TESTMESSAGE = _descriptor.Descriptor( + name='TestMessage', + full_name='proto3.TestMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bool_value', full_name='proto3.TestMessage.bool_value', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int32_value', full_name='proto3.TestMessage.int32_value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int64_value', full_name='proto3.TestMessage.int64_value', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uint32_value', full_name='proto3.TestMessage.uint32_value', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uint64_value', full_name='proto3.TestMessage.uint64_value', index=4, + number=5, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='float_value', full_name='proto3.TestMessage.float_value', index=5, + number=6, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='double_value', full_name='proto3.TestMessage.double_value', index=6, + number=7, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_value', full_name='proto3.TestMessage.string_value', index=7, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bytes_value', full_name='proto3.TestMessage.bytes_value', index=8, + number=9, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='enum_value', full_name='proto3.TestMessage.enum_value', index=9, + number=10, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_value', full_name='proto3.TestMessage.message_value', index=10, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bool_value', full_name='proto3.TestMessage.repeated_bool_value', index=11, + number=21, type=8, cpp_type=7, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int32_value', full_name='proto3.TestMessage.repeated_int32_value', index=12, + number=22, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int64_value', full_name='proto3.TestMessage.repeated_int64_value', index=13, + number=23, type=3, cpp_type=2, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint32_value', full_name='proto3.TestMessage.repeated_uint32_value', index=14, + number=24, type=13, cpp_type=3, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint64_value', full_name='proto3.TestMessage.repeated_uint64_value', index=15, + number=25, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_float_value', full_name='proto3.TestMessage.repeated_float_value', index=16, + number=26, type=2, cpp_type=6, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_double_value', full_name='proto3.TestMessage.repeated_double_value', index=17, + number=27, type=1, cpp_type=5, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_string_value', full_name='proto3.TestMessage.repeated_string_value', index=18, + number=28, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bytes_value', full_name='proto3.TestMessage.repeated_bytes_value', index=19, + number=29, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_enum_value', full_name='proto3.TestMessage.repeated_enum_value', index=20, + number=30, type=14, cpp_type=8, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_message_value', full_name='proto3.TestMessage.repeated_message_value', index=21, + number=31, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=276, + serialized_end=936, +) + + +_TESTONEOF = _descriptor.Descriptor( + name='TestOneof', + 
full_name='proto3.TestOneof', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='oneof_int32_value', full_name='proto3.TestOneof.oneof_int32_value', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_string_value', full_name='proto3.TestOneof.oneof_string_value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_bytes_value', full_name='proto3.TestOneof.oneof_bytes_value', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_enum_value', full_name='proto3.TestOneof.oneof_enum_value', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='oneof_message_value', full_name='proto3.TestOneof.oneof_message_value', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='oneof_value', full_name='proto3.TestOneof.oneof_value', + 
index=0, containing_type=None, fields=[]), + ], + serialized_start=939, + serialized_end=1151, +) + + +_TESTMAP_BOOLMAPENTRY = _descriptor.Descriptor( + name='BoolMapEntry', + full_name='proto3.TestMap.BoolMapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestMap.BoolMapEntry.key', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestMap.BoolMapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1469, + serialized_end=1515, +) + +_TESTMAP_INT32MAPENTRY = _descriptor.Descriptor( + name='Int32MapEntry', + full_name='proto3.TestMap.Int32MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestMap.Int32MapEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestMap.Int32MapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + 
extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1517, + serialized_end=1564, +) + +_TESTMAP_INT64MAPENTRY = _descriptor.Descriptor( + name='Int64MapEntry', + full_name='proto3.TestMap.Int64MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestMap.Int64MapEntry.key', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestMap.Int64MapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1566, + serialized_end=1613, +) + +_TESTMAP_UINT32MAPENTRY = _descriptor.Descriptor( + name='Uint32MapEntry', + full_name='proto3.TestMap.Uint32MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestMap.Uint32MapEntry.key', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestMap.Uint32MapEntry.value', index=1, + number=2, type=5, cpp_type=1, 
label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1615, + serialized_end=1663, +) + +_TESTMAP_UINT64MAPENTRY = _descriptor.Descriptor( + name='Uint64MapEntry', + full_name='proto3.TestMap.Uint64MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestMap.Uint64MapEntry.key', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestMap.Uint64MapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1665, + serialized_end=1713, +) + +_TESTMAP_STRINGMAPENTRY = _descriptor.Descriptor( + name='StringMapEntry', + full_name='proto3.TestMap.StringMapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestMap.StringMapEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestMap.StringMapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1715, + serialized_end=1763, +) + +_TESTMAP = _descriptor.Descriptor( + name='TestMap', + full_name='proto3.TestMap', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bool_map', full_name='proto3.TestMap.bool_map', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int32_map', full_name='proto3.TestMap.int32_map', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int64_map', full_name='proto3.TestMap.int64_map', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uint32_map', full_name='proto3.TestMap.uint32_map', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uint64_map', full_name='proto3.TestMap.uint64_map', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_map', full_name='proto3.TestMap.string_map', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTMAP_BOOLMAPENTRY, _TESTMAP_INT32MAPENTRY, _TESTMAP_INT64MAPENTRY, _TESTMAP_UINT32MAPENTRY, _TESTMAP_UINT64MAPENTRY, _TESTMAP_STRINGMAPENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1154, + serialized_end=1763, +) + + +_TESTNESTEDMAP_BOOLMAPENTRY = _descriptor.Descriptor( + name='BoolMapEntry', + full_name='proto3.TestNestedMap.BoolMapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestNestedMap.BoolMapEntry.key', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestNestedMap.BoolMapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), 
_b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1469, + serialized_end=1515, +) + +_TESTNESTEDMAP_INT32MAPENTRY = _descriptor.Descriptor( + name='Int32MapEntry', + full_name='proto3.TestNestedMap.Int32MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestNestedMap.Int32MapEntry.key', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestNestedMap.Int32MapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1517, + serialized_end=1564, +) + +_TESTNESTEDMAP_INT64MAPENTRY = _descriptor.Descriptor( + name='Int64MapEntry', + full_name='proto3.TestNestedMap.Int64MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestNestedMap.Int64MapEntry.key', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestNestedMap.Int64MapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1566, + serialized_end=1613, +) + +_TESTNESTEDMAP_UINT32MAPENTRY = _descriptor.Descriptor( + name='Uint32MapEntry', + full_name='proto3.TestNestedMap.Uint32MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestNestedMap.Uint32MapEntry.key', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestNestedMap.Uint32MapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1615, + serialized_end=1663, +) + +_TESTNESTEDMAP_UINT64MAPENTRY = _descriptor.Descriptor( + name='Uint64MapEntry', + full_name='proto3.TestNestedMap.Uint64MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestNestedMap.Uint64MapEntry.key', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='value', full_name='proto3.TestNestedMap.Uint64MapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1665, + serialized_end=1713, +) + +_TESTNESTEDMAP_STRINGMAPENTRY = _descriptor.Descriptor( + name='StringMapEntry', + full_name='proto3.TestNestedMap.StringMapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestNestedMap.StringMapEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestNestedMap.StringMapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1715, + serialized_end=1763, +) + +_TESTNESTEDMAP_MAPMAPENTRY = _descriptor.Descriptor( + name='MapMapEntry', + full_name='proto3.TestNestedMap.MapMapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', 
full_name='proto3.TestNestedMap.MapMapEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestNestedMap.MapMapEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2471, + serialized_end=2539, +) + +_TESTNESTEDMAP = _descriptor.Descriptor( + name='TestNestedMap', + full_name='proto3.TestNestedMap', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bool_map', full_name='proto3.TestNestedMap.bool_map', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int32_map', full_name='proto3.TestNestedMap.int32_map', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int64_map', full_name='proto3.TestNestedMap.int64_map', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='uint32_map', full_name='proto3.TestNestedMap.uint32_map', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uint64_map', full_name='proto3.TestNestedMap.uint64_map', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_map', full_name='proto3.TestNestedMap.string_map', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_map', full_name='proto3.TestNestedMap.map_map', index=6, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTNESTEDMAP_BOOLMAPENTRY, _TESTNESTEDMAP_INT32MAPENTRY, _TESTNESTEDMAP_INT64MAPENTRY, _TESTNESTEDMAP_UINT32MAPENTRY, _TESTNESTEDMAP_UINT64MAPENTRY, _TESTNESTEDMAP_STRINGMAPENTRY, _TESTNESTEDMAP_MAPMAPENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1766, + serialized_end=2539, +) + + +_TESTWRAPPER = _descriptor.Descriptor( + name='TestWrapper', + full_name='proto3.TestWrapper', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bool_value', full_name='proto3.TestWrapper.bool_value', index=0, + number=1, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int32_value', full_name='proto3.TestWrapper.int32_value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='int64_value', full_name='proto3.TestWrapper.int64_value', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uint32_value', full_name='proto3.TestWrapper.uint32_value', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='uint64_value', full_name='proto3.TestWrapper.uint64_value', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='float_value', full_name='proto3.TestWrapper.float_value', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='double_value', full_name='proto3.TestWrapper.double_value', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='string_value', full_name='proto3.TestWrapper.string_value', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bytes_value', full_name='proto3.TestWrapper.bytes_value', index=8, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bool_value', full_name='proto3.TestWrapper.repeated_bool_value', index=9, + number=11, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int32_value', full_name='proto3.TestWrapper.repeated_int32_value', index=10, + number=12, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_int64_value', full_name='proto3.TestWrapper.repeated_int64_value', index=11, + number=13, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint32_value', full_name='proto3.TestWrapper.repeated_uint32_value', index=12, + number=14, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_uint64_value', full_name='proto3.TestWrapper.repeated_uint64_value', index=13, + number=15, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_float_value', full_name='proto3.TestWrapper.repeated_float_value', index=14, + number=16, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_double_value', full_name='proto3.TestWrapper.repeated_double_value', index=15, + number=17, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_string_value', full_name='proto3.TestWrapper.repeated_string_value', index=16, + number=18, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_bytes_value', full_name='proto3.TestWrapper.repeated_bytes_value', index=17, + number=19, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2542, + serialized_end=3548, +) + + +_TESTTIMESTAMP = _descriptor.Descriptor( + 
name='TestTimestamp', + full_name='proto3.TestTimestamp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestTimestamp.value', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_value', full_name='proto3.TestTimestamp.repeated_value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3550, + serialized_end=3660, +) + + +_TESTDURATION = _descriptor.Descriptor( + name='TestDuration', + full_name='proto3.TestDuration', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestDuration.value', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_value', full_name='proto3.TestDuration.repeated_value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3662, + serialized_end=3769, +) + + 
+_TESTFIELDMASK = _descriptor.Descriptor( + name='TestFieldMask', + full_name='proto3.TestFieldMask', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestFieldMask.value', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3771, + serialized_end=3829, +) + + +_TESTSTRUCT = _descriptor.Descriptor( + name='TestStruct', + full_name='proto3.TestStruct', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestStruct.value', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_value', full_name='proto3.TestStruct.repeated_value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3831, + serialized_end=3932, +) + + +_TESTANY = _descriptor.Descriptor( + name='TestAny', + full_name='proto3.TestAny', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestAny.value', index=0, + number=1, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_value', full_name='proto3.TestAny.repeated_value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3934, + serialized_end=4026, +) + + +_TESTVALUE = _descriptor.Descriptor( + name='TestValue', + full_name='proto3.TestValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestValue.value', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_value', full_name='proto3.TestValue.repeated_value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4028, + serialized_end=4126, +) + + +_TESTLISTVALUE = _descriptor.Descriptor( + name='TestListValue', + full_name='proto3.TestListValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestListValue.value', index=0, + number=1, type=11, cpp_type=10, 
label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repeated_value', full_name='proto3.TestListValue.repeated_value', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4128, + serialized_end=4238, +) + + +_TESTBOOLVALUE_BOOLMAPENTRY = _descriptor.Descriptor( + name='BoolMapEntry', + full_name='proto3.TestBoolValue.BoolMapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='proto3.TestBoolValue.BoolMapEntry.key', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestBoolValue.BoolMapEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1469, + serialized_end=1515, +) + +_TESTBOOLVALUE = _descriptor.Descriptor( + name='TestBoolValue', + full_name='proto3.TestBoolValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='bool_value', full_name='proto3.TestBoolValue.bool_value', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bool_map', full_name='proto3.TestBoolValue.bool_map', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TESTBOOLVALUE_BOOLMAPENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4241, + serialized_end=4378, +) + + +_TESTCUSTOMJSONNAME = _descriptor.Descriptor( + name='TestCustomJsonName', + full_name='proto3.TestCustomJsonName', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='proto3.TestCustomJsonName.value', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4380, + serialized_end=4423, +) + +_TESTMESSAGE.fields_by_name['enum_value'].enum_type = _ENUMTYPE +_TESTMESSAGE.fields_by_name['message_value'].message_type = _MESSAGETYPE +_TESTMESSAGE.fields_by_name['repeated_enum_value'].enum_type = _ENUMTYPE +_TESTMESSAGE.fields_by_name['repeated_message_value'].message_type = _MESSAGETYPE +_TESTONEOF.fields_by_name['oneof_enum_value'].enum_type = _ENUMTYPE 
+_TESTONEOF.fields_by_name['oneof_message_value'].message_type = _MESSAGETYPE +_TESTONEOF.oneofs_by_name['oneof_value'].fields.append( + _TESTONEOF.fields_by_name['oneof_int32_value']) +_TESTONEOF.fields_by_name['oneof_int32_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value'] +_TESTONEOF.oneofs_by_name['oneof_value'].fields.append( + _TESTONEOF.fields_by_name['oneof_string_value']) +_TESTONEOF.fields_by_name['oneof_string_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value'] +_TESTONEOF.oneofs_by_name['oneof_value'].fields.append( + _TESTONEOF.fields_by_name['oneof_bytes_value']) +_TESTONEOF.fields_by_name['oneof_bytes_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value'] +_TESTONEOF.oneofs_by_name['oneof_value'].fields.append( + _TESTONEOF.fields_by_name['oneof_enum_value']) +_TESTONEOF.fields_by_name['oneof_enum_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value'] +_TESTONEOF.oneofs_by_name['oneof_value'].fields.append( + _TESTONEOF.fields_by_name['oneof_message_value']) +_TESTONEOF.fields_by_name['oneof_message_value'].containing_oneof = _TESTONEOF.oneofs_by_name['oneof_value'] +_TESTMAP_BOOLMAPENTRY.containing_type = _TESTMAP +_TESTMAP_INT32MAPENTRY.containing_type = _TESTMAP +_TESTMAP_INT64MAPENTRY.containing_type = _TESTMAP +_TESTMAP_UINT32MAPENTRY.containing_type = _TESTMAP +_TESTMAP_UINT64MAPENTRY.containing_type = _TESTMAP +_TESTMAP_STRINGMAPENTRY.containing_type = _TESTMAP +_TESTMAP.fields_by_name['bool_map'].message_type = _TESTMAP_BOOLMAPENTRY +_TESTMAP.fields_by_name['int32_map'].message_type = _TESTMAP_INT32MAPENTRY +_TESTMAP.fields_by_name['int64_map'].message_type = _TESTMAP_INT64MAPENTRY +_TESTMAP.fields_by_name['uint32_map'].message_type = _TESTMAP_UINT32MAPENTRY +_TESTMAP.fields_by_name['uint64_map'].message_type = _TESTMAP_UINT64MAPENTRY +_TESTMAP.fields_by_name['string_map'].message_type = _TESTMAP_STRINGMAPENTRY +_TESTNESTEDMAP_BOOLMAPENTRY.containing_type = _TESTNESTEDMAP 
+_TESTNESTEDMAP_INT32MAPENTRY.containing_type = _TESTNESTEDMAP +_TESTNESTEDMAP_INT64MAPENTRY.containing_type = _TESTNESTEDMAP +_TESTNESTEDMAP_UINT32MAPENTRY.containing_type = _TESTNESTEDMAP +_TESTNESTEDMAP_UINT64MAPENTRY.containing_type = _TESTNESTEDMAP +_TESTNESTEDMAP_STRINGMAPENTRY.containing_type = _TESTNESTEDMAP +_TESTNESTEDMAP_MAPMAPENTRY.fields_by_name['value'].message_type = _TESTNESTEDMAP +_TESTNESTEDMAP_MAPMAPENTRY.containing_type = _TESTNESTEDMAP +_TESTNESTEDMAP.fields_by_name['bool_map'].message_type = _TESTNESTEDMAP_BOOLMAPENTRY +_TESTNESTEDMAP.fields_by_name['int32_map'].message_type = _TESTNESTEDMAP_INT32MAPENTRY +_TESTNESTEDMAP.fields_by_name['int64_map'].message_type = _TESTNESTEDMAP_INT64MAPENTRY +_TESTNESTEDMAP.fields_by_name['uint32_map'].message_type = _TESTNESTEDMAP_UINT32MAPENTRY +_TESTNESTEDMAP.fields_by_name['uint64_map'].message_type = _TESTNESTEDMAP_UINT64MAPENTRY +_TESTNESTEDMAP.fields_by_name['string_map'].message_type = _TESTNESTEDMAP_STRINGMAPENTRY +_TESTNESTEDMAP.fields_by_name['map_map'].message_type = _TESTNESTEDMAP_MAPMAPENTRY +_TESTWRAPPER.fields_by_name['bool_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE +_TESTWRAPPER.fields_by_name['int32_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE +_TESTWRAPPER.fields_by_name['int64_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE +_TESTWRAPPER.fields_by_name['uint32_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE +_TESTWRAPPER.fields_by_name['uint64_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT64VALUE +_TESTWRAPPER.fields_by_name['float_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._FLOATVALUE +_TESTWRAPPER.fields_by_name['double_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE +_TESTWRAPPER.fields_by_name['string_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE 
+_TESTWRAPPER.fields_by_name['bytes_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BYTESVALUE +_TESTWRAPPER.fields_by_name['repeated_bool_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE +_TESTWRAPPER.fields_by_name['repeated_int32_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE +_TESTWRAPPER.fields_by_name['repeated_int64_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE +_TESTWRAPPER.fields_by_name['repeated_uint32_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE +_TESTWRAPPER.fields_by_name['repeated_uint64_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT64VALUE +_TESTWRAPPER.fields_by_name['repeated_float_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._FLOATVALUE +_TESTWRAPPER.fields_by_name['repeated_double_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE +_TESTWRAPPER.fields_by_name['repeated_string_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE +_TESTWRAPPER.fields_by_name['repeated_bytes_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BYTESVALUE +_TESTTIMESTAMP.fields_by_name['value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TESTTIMESTAMP.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TESTDURATION.fields_by_name['value'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_TESTDURATION.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_TESTFIELDMASK.fields_by_name['value'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_TESTSTRUCT.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT +_TESTSTRUCT.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT +_TESTANY.fields_by_name['value'].message_type = 
google_dot_protobuf_dot_any__pb2._ANY +_TESTANY.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_any__pb2._ANY +_TESTVALUE.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._VALUE +_TESTVALUE.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_struct__pb2._VALUE +_TESTLISTVALUE.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._LISTVALUE +_TESTLISTVALUE.fields_by_name['repeated_value'].message_type = google_dot_protobuf_dot_struct__pb2._LISTVALUE +_TESTBOOLVALUE_BOOLMAPENTRY.containing_type = _TESTBOOLVALUE +_TESTBOOLVALUE.fields_by_name['bool_map'].message_type = _TESTBOOLVALUE_BOOLMAPENTRY +DESCRIPTOR.message_types_by_name['MessageType'] = _MESSAGETYPE +DESCRIPTOR.message_types_by_name['TestMessage'] = _TESTMESSAGE +DESCRIPTOR.message_types_by_name['TestOneof'] = _TESTONEOF +DESCRIPTOR.message_types_by_name['TestMap'] = _TESTMAP +DESCRIPTOR.message_types_by_name['TestNestedMap'] = _TESTNESTEDMAP +DESCRIPTOR.message_types_by_name['TestWrapper'] = _TESTWRAPPER +DESCRIPTOR.message_types_by_name['TestTimestamp'] = _TESTTIMESTAMP +DESCRIPTOR.message_types_by_name['TestDuration'] = _TESTDURATION +DESCRIPTOR.message_types_by_name['TestFieldMask'] = _TESTFIELDMASK +DESCRIPTOR.message_types_by_name['TestStruct'] = _TESTSTRUCT +DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY +DESCRIPTOR.message_types_by_name['TestValue'] = _TESTVALUE +DESCRIPTOR.message_types_by_name['TestListValue'] = _TESTLISTVALUE +DESCRIPTOR.message_types_by_name['TestBoolValue'] = _TESTBOOLVALUE +DESCRIPTOR.message_types_by_name['TestCustomJsonName'] = _TESTCUSTOMJSONNAME +DESCRIPTOR.enum_types_by_name['EnumType'] = _ENUMTYPE + +MessageType = _reflection.GeneratedProtocolMessageType('MessageType', (_message.Message,), dict( + DESCRIPTOR = _MESSAGETYPE, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.MessageType) + )) 
+_sym_db.RegisterMessage(MessageType) + +TestMessage = _reflection.GeneratedProtocolMessageType('TestMessage', (_message.Message,), dict( + DESCRIPTOR = _TESTMESSAGE, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestMessage) + )) +_sym_db.RegisterMessage(TestMessage) + +TestOneof = _reflection.GeneratedProtocolMessageType('TestOneof', (_message.Message,), dict( + DESCRIPTOR = _TESTONEOF, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestOneof) + )) +_sym_db.RegisterMessage(TestOneof) + +TestMap = _reflection.GeneratedProtocolMessageType('TestMap', (_message.Message,), dict( + + BoolMapEntry = _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_BOOLMAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestMap.BoolMapEntry) + )) + , + + Int32MapEntry = _reflection.GeneratedProtocolMessageType('Int32MapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_INT32MAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestMap.Int32MapEntry) + )) + , + + Int64MapEntry = _reflection.GeneratedProtocolMessageType('Int64MapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_INT64MAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestMap.Int64MapEntry) + )) + , + + Uint32MapEntry = _reflection.GeneratedProtocolMessageType('Uint32MapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_UINT32MAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestMap.Uint32MapEntry) + )) + , + + Uint64MapEntry = _reflection.GeneratedProtocolMessageType('Uint64MapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_UINT64MAPENTRY, + 
__module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestMap.Uint64MapEntry) + )) + , + + StringMapEntry = _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTMAP_STRINGMAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestMap.StringMapEntry) + )) + , + DESCRIPTOR = _TESTMAP, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestMap) + )) +_sym_db.RegisterMessage(TestMap) +_sym_db.RegisterMessage(TestMap.BoolMapEntry) +_sym_db.RegisterMessage(TestMap.Int32MapEntry) +_sym_db.RegisterMessage(TestMap.Int64MapEntry) +_sym_db.RegisterMessage(TestMap.Uint32MapEntry) +_sym_db.RegisterMessage(TestMap.Uint64MapEntry) +_sym_db.RegisterMessage(TestMap.StringMapEntry) + +TestNestedMap = _reflection.GeneratedProtocolMessageType('TestNestedMap', (_message.Message,), dict( + + BoolMapEntry = _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTNESTEDMAP_BOOLMAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.BoolMapEntry) + )) + , + + Int32MapEntry = _reflection.GeneratedProtocolMessageType('Int32MapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTNESTEDMAP_INT32MAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Int32MapEntry) + )) + , + + Int64MapEntry = _reflection.GeneratedProtocolMessageType('Int64MapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTNESTEDMAP_INT64MAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Int64MapEntry) + )) + , + + Uint32MapEntry = _reflection.GeneratedProtocolMessageType('Uint32MapEntry', (_message.Message,), dict( 
+ DESCRIPTOR = _TESTNESTEDMAP_UINT32MAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Uint32MapEntry) + )) + , + + Uint64MapEntry = _reflection.GeneratedProtocolMessageType('Uint64MapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTNESTEDMAP_UINT64MAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Uint64MapEntry) + )) + , + + StringMapEntry = _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTNESTEDMAP_STRINGMAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.StringMapEntry) + )) + , + + MapMapEntry = _reflection.GeneratedProtocolMessageType('MapMapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTNESTEDMAP_MAPMAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.MapMapEntry) + )) + , + DESCRIPTOR = _TESTNESTEDMAP, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestNestedMap) + )) +_sym_db.RegisterMessage(TestNestedMap) +_sym_db.RegisterMessage(TestNestedMap.BoolMapEntry) +_sym_db.RegisterMessage(TestNestedMap.Int32MapEntry) +_sym_db.RegisterMessage(TestNestedMap.Int64MapEntry) +_sym_db.RegisterMessage(TestNestedMap.Uint32MapEntry) +_sym_db.RegisterMessage(TestNestedMap.Uint64MapEntry) +_sym_db.RegisterMessage(TestNestedMap.StringMapEntry) +_sym_db.RegisterMessage(TestNestedMap.MapMapEntry) + +TestWrapper = _reflection.GeneratedProtocolMessageType('TestWrapper', (_message.Message,), dict( + DESCRIPTOR = _TESTWRAPPER, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestWrapper) + )) +_sym_db.RegisterMessage(TestWrapper) + +TestTimestamp = 
_reflection.GeneratedProtocolMessageType('TestTimestamp', (_message.Message,), dict( + DESCRIPTOR = _TESTTIMESTAMP, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestTimestamp) + )) +_sym_db.RegisterMessage(TestTimestamp) + +TestDuration = _reflection.GeneratedProtocolMessageType('TestDuration', (_message.Message,), dict( + DESCRIPTOR = _TESTDURATION, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestDuration) + )) +_sym_db.RegisterMessage(TestDuration) + +TestFieldMask = _reflection.GeneratedProtocolMessageType('TestFieldMask', (_message.Message,), dict( + DESCRIPTOR = _TESTFIELDMASK, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestFieldMask) + )) +_sym_db.RegisterMessage(TestFieldMask) + +TestStruct = _reflection.GeneratedProtocolMessageType('TestStruct', (_message.Message,), dict( + DESCRIPTOR = _TESTSTRUCT, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestStruct) + )) +_sym_db.RegisterMessage(TestStruct) + +TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), dict( + DESCRIPTOR = _TESTANY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestAny) + )) +_sym_db.RegisterMessage(TestAny) + +TestValue = _reflection.GeneratedProtocolMessageType('TestValue', (_message.Message,), dict( + DESCRIPTOR = _TESTVALUE, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestValue) + )) +_sym_db.RegisterMessage(TestValue) + +TestListValue = _reflection.GeneratedProtocolMessageType('TestListValue', (_message.Message,), dict( + DESCRIPTOR = _TESTLISTVALUE, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestListValue) + )) 
+_sym_db.RegisterMessage(TestListValue) + +TestBoolValue = _reflection.GeneratedProtocolMessageType('TestBoolValue', (_message.Message,), dict( + + BoolMapEntry = _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), dict( + DESCRIPTOR = _TESTBOOLVALUE_BOOLMAPENTRY, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestBoolValue.BoolMapEntry) + )) + , + DESCRIPTOR = _TESTBOOLVALUE, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestBoolValue) + )) +_sym_db.RegisterMessage(TestBoolValue) +_sym_db.RegisterMessage(TestBoolValue.BoolMapEntry) + +TestCustomJsonName = _reflection.GeneratedProtocolMessageType('TestCustomJsonName', (_message.Message,), dict( + DESCRIPTOR = _TESTCUSTOMJSONNAME, + __module__ = 'google.protobuf.util.json_format_proto3_pb2' + # @@protoc_insertion_point(class_scope:proto3.TestCustomJsonName) + )) +_sym_db.RegisterMessage(TestCustomJsonName) + + +_TESTMAP_BOOLMAPENTRY.has_options = True +_TESTMAP_BOOLMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_INT32MAPENTRY.has_options = True +_TESTMAP_INT32MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_INT64MAPENTRY.has_options = True +_TESTMAP_INT64MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_UINT32MAPENTRY.has_options = True +_TESTMAP_UINT32MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_UINT64MAPENTRY.has_options = True +_TESTMAP_UINT64MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTMAP_STRINGMAPENTRY.has_options = True +_TESTMAP_STRINGMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTNESTEDMAP_BOOLMAPENTRY.has_options = True 
+_TESTNESTEDMAP_BOOLMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTNESTEDMAP_INT32MAPENTRY.has_options = True +_TESTNESTEDMAP_INT32MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTNESTEDMAP_INT64MAPENTRY.has_options = True +_TESTNESTEDMAP_INT64MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTNESTEDMAP_UINT32MAPENTRY.has_options = True +_TESTNESTEDMAP_UINT32MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTNESTEDMAP_UINT64MAPENTRY.has_options = True +_TESTNESTEDMAP_UINT64MAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTNESTEDMAP_STRINGMAPENTRY.has_options = True +_TESTNESTEDMAP_STRINGMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTNESTEDMAP_MAPMAPENTRY.has_options = True +_TESTNESTEDMAP_MAPMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TESTBOOLVALUE_BOOLMAPENTRY.has_options = True +_TESTBOOLVALUE_BOOLMAPENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/google/protobuf/wrappers_pb2.py b/deps/google/protobuf/wrappers_pb2.py new file mode 100644 index 00000000..34398b7e --- /dev/null +++ b/deps/google/protobuf/wrappers_pb2.py @@ -0,0 +1,383 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/wrappers.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/protobuf/wrappers.proto', + package='google.protobuf', + syntax='proto3', + serialized_pb=_b('\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42S\n\x13\x63om.google.protobufB\rWrappersProtoP\x01\xa0\x01\x01\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_DOUBLEVALUE = _descriptor.Descriptor( + name='DoubleValue', + full_name='google.protobuf.DoubleValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.DoubleValue.value', index=0, + number=1, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + 
syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=51, + serialized_end=79, +) + + +_FLOATVALUE = _descriptor.Descriptor( + name='FloatValue', + full_name='google.protobuf.FloatValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.FloatValue.value', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=81, + serialized_end=108, +) + + +_INT64VALUE = _descriptor.Descriptor( + name='Int64Value', + full_name='google.protobuf.Int64Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.Int64Value.value', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=110, + serialized_end=137, +) + + +_UINT64VALUE = _descriptor.Descriptor( + name='UInt64Value', + full_name='google.protobuf.UInt64Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.UInt64Value.value', index=0, + number=1, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + 
], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=139, + serialized_end=167, +) + + +_INT32VALUE = _descriptor.Descriptor( + name='Int32Value', + full_name='google.protobuf.Int32Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.Int32Value.value', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=169, + serialized_end=196, +) + + +_UINT32VALUE = _descriptor.Descriptor( + name='UInt32Value', + full_name='google.protobuf.UInt32Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.UInt32Value.value', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=198, + serialized_end=226, +) + + +_BOOLVALUE = _descriptor.Descriptor( + name='BoolValue', + full_name='google.protobuf.BoolValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.BoolValue.value', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=228, + serialized_end=254, +) + + +_STRINGVALUE = _descriptor.Descriptor( + name='StringValue', + full_name='google.protobuf.StringValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.StringValue.value', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=256, + serialized_end=284, +) + + +_BYTESVALUE = _descriptor.Descriptor( + name='BytesValue', + full_name='google.protobuf.BytesValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='google.protobuf.BytesValue.value', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=286, + serialized_end=313, +) + +DESCRIPTOR.message_types_by_name['DoubleValue'] = _DOUBLEVALUE +DESCRIPTOR.message_types_by_name['FloatValue'] = _FLOATVALUE +DESCRIPTOR.message_types_by_name['Int64Value'] = _INT64VALUE +DESCRIPTOR.message_types_by_name['UInt64Value'] = _UINT64VALUE 
+DESCRIPTOR.message_types_by_name['Int32Value'] = _INT32VALUE +DESCRIPTOR.message_types_by_name['UInt32Value'] = _UINT32VALUE +DESCRIPTOR.message_types_by_name['BoolValue'] = _BOOLVALUE +DESCRIPTOR.message_types_by_name['StringValue'] = _STRINGVALUE +DESCRIPTOR.message_types_by_name['BytesValue'] = _BYTESVALUE + +DoubleValue = _reflection.GeneratedProtocolMessageType('DoubleValue', (_message.Message,), dict( + DESCRIPTOR = _DOUBLEVALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.DoubleValue) + )) +_sym_db.RegisterMessage(DoubleValue) + +FloatValue = _reflection.GeneratedProtocolMessageType('FloatValue', (_message.Message,), dict( + DESCRIPTOR = _FLOATVALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.FloatValue) + )) +_sym_db.RegisterMessage(FloatValue) + +Int64Value = _reflection.GeneratedProtocolMessageType('Int64Value', (_message.Message,), dict( + DESCRIPTOR = _INT64VALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Int64Value) + )) +_sym_db.RegisterMessage(Int64Value) + +UInt64Value = _reflection.GeneratedProtocolMessageType('UInt64Value', (_message.Message,), dict( + DESCRIPTOR = _UINT64VALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.UInt64Value) + )) +_sym_db.RegisterMessage(UInt64Value) + +Int32Value = _reflection.GeneratedProtocolMessageType('Int32Value', (_message.Message,), dict( + DESCRIPTOR = _INT32VALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.Int32Value) + )) +_sym_db.RegisterMessage(Int32Value) + +UInt32Value = _reflection.GeneratedProtocolMessageType('UInt32Value', (_message.Message,), dict( + DESCRIPTOR = _UINT32VALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.UInt32Value) + )) 
+_sym_db.RegisterMessage(UInt32Value) + +BoolValue = _reflection.GeneratedProtocolMessageType('BoolValue', (_message.Message,), dict( + DESCRIPTOR = _BOOLVALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.BoolValue) + )) +_sym_db.RegisterMessage(BoolValue) + +StringValue = _reflection.GeneratedProtocolMessageType('StringValue', (_message.Message,), dict( + DESCRIPTOR = _STRINGVALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.StringValue) + )) +_sym_db.RegisterMessage(StringValue) + +BytesValue = _reflection.GeneratedProtocolMessageType('BytesValue', (_message.Message,), dict( + DESCRIPTOR = _BYTESVALUE, + __module__ = 'google.protobuf.wrappers_pb2' + # @@protoc_insertion_point(class_scope:google.protobuf.BytesValue) + )) +_sym_db.RegisterMessage(BytesValue) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\rWrappersProtoP\001\240\001\001\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes')) +# @@protoc_insertion_point(module_scope) diff --git a/deps/nest/__init__.py b/deps/nest/__init__.py new file mode 100644 index 00000000..8fab2fd7 --- /dev/null +++ b/deps/nest/__init__.py @@ -0,0 +1,10 @@ +# -*- coding:utf-8 -*- + +from .nest import Nest + +from .utils import CELSIUS +from .utils import FAHRENHEIT + +from .helpers import nest_login + +__all__ = ['CELSIUS', 'FAHRENHEIT', 'Nest', 'nest_login'] diff --git a/deps/nest/__pycache__/__init__.cpython-34.pyc b/deps/nest/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..14387bf9 Binary files /dev/null and b/deps/nest/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/nest/__pycache__/command_line.cpython-34.pyc b/deps/nest/__pycache__/command_line.cpython-34.pyc new file mode 100644 index 00000000..0da63f13 Binary files /dev/null and 
b/deps/nest/__pycache__/command_line.cpython-34.pyc differ diff --git a/deps/nest/__pycache__/helpers.cpython-34.pyc b/deps/nest/__pycache__/helpers.cpython-34.pyc new file mode 100644 index 00000000..3c9d8dba Binary files /dev/null and b/deps/nest/__pycache__/helpers.cpython-34.pyc differ diff --git a/deps/nest/__pycache__/nest.cpython-34.pyc b/deps/nest/__pycache__/nest.cpython-34.pyc new file mode 100644 index 00000000..66a756a6 Binary files /dev/null and b/deps/nest/__pycache__/nest.cpython-34.pyc differ diff --git a/deps/nest/__pycache__/utils.cpython-34.pyc b/deps/nest/__pycache__/utils.cpython-34.pyc new file mode 100644 index 00000000..01bea1da Binary files /dev/null and b/deps/nest/__pycache__/utils.cpython-34.pyc differ diff --git a/deps/nest/command_line.py b/deps/nest/command_line.py new file mode 100644 index 00000000..4e20f955 --- /dev/null +++ b/deps/nest/command_line.py @@ -0,0 +1,264 @@ +#! /usr/bin/python +# -*- coding:utf-8 -*- + +''' +nest.py -- a python interface to the Nest Thermostats +''' + +from __future__ import print_function + +import argparse +import os +import sys + +from . import nest +from . import utils +from . 
import helpers + + +def parse_args(): + prog = os.path.basename(sys.argv[0]) + config_file = os.path.sep.join(('~', '.config', prog, 'config')) + + conf_parser = argparse.ArgumentParser(prog=prog, add_help=False) + conf_parser.add_argument('--conf', default=config_file, + help='config file (default %s)' % config_file, + metavar='FILE') + + args, remaining_argv = conf_parser.parse_known_args() + + defaults = helpers.get_config(config_path=args.conf) + + description = 'Command line interface to Nest™ Thermostats' + parser = argparse.ArgumentParser(description=description, + parents=[conf_parser]) + + parser.set_defaults(**defaults) + + parser.add_argument('--token-cache', dest='token_cache', + help='auth access token cache file', + metavar='TOKEN_CACHE_FILE') + + parser.add_argument('-t', '--token', dest='token', + help='auth access token', metavar='TOKEN') + + parser.add_argument('-u', '--user', dest='user', + help='username for nest.com', metavar='USER') + + parser.add_argument('-p', '--password', dest='password', + help='password for nest.com', metavar='PASSWORD') + + parser.add_argument('-c', '--celsius', dest='celsius', action='store_true', + help='use celsius instead of farenheit') + + parser.add_argument('-s', '--serial', dest='serial', + help='optional, specify serial number of nest ' + 'thermostat to talk to') + + parser.add_argument('-S', '--structure', dest='structure', + help='optional, specify structure name to' + 'scope device actions') + + parser.add_argument('-i', '--index', dest='index', default=0, type=int, + help='optional, specify index number of nest to ' + 'talk to') + + subparsers = parser.add_subparsers(dest='command', + help='command help') + temp = subparsers.add_parser('temp', help='show/set temperature') + temp.add_argument('temperature', nargs='*', type=float, + help='target tempterature to set device to') + + fan = subparsers.add_parser('fan', help='set fan "on" or "auto"') + fan_group = fan.add_mutually_exclusive_group() + 
fan_group.add_argument('--auto', action='store_true', default=False, + help='set fan to auto') + fan_group.add_argument('--on', action='store_true', default=False, + help='set fan to on') + + mode = subparsers.add_parser('mode', help='show/set current mode') + mode_group = mode.add_mutually_exclusive_group() + mode_group.add_argument('--cool', action='store_true', default=False, + help='set mode to cool') + mode_group.add_argument('--heat', action='store_true', default=False, + help='set mode to heat') + mode_group.add_argument('--range', action='store_true', default=False, + help='set mode to range') + mode_group.add_argument('--off', action='store_true', default=False, + help='set mode to off') + + away = subparsers.add_parser('away', help='show/set current away status') + away_group = away.add_mutually_exclusive_group() + away_group.add_argument('--away', action='store_true', default=False, + help='set away status to "away"') + away_group.add_argument('--home', action='store_true', default=False, + help='set away status to "home"') + + subparsers.add_parser('target', help='show current temp target') + subparsers.add_parser('humid', help='show current humidity') + + target_hum = subparsers.add_parser('target_hum', + help='show/set target humidty') + target_hum.add_argument('humidity', nargs='*', + help='specify target humidity value or auto ' + 'to auto-select a humidity based on outside ' + 'temp') + + subparsers.add_parser('show', help='show everything') + + return parser.parse_args() + + +def main(): + args = parse_args() + + def _identity(x): + return x + + if args.celsius: + display_temp = _identity + convert_temp = _identity + + else: + display_temp = utils.c_to_f + convert_temp = utils.f_to_c + + cmd = args.command + + token_cache = None + if args.token_cache: + token_cache = os.path.expanduser(args.token_cache) + + # NOTE(jkoelker) Token caching is currently broken + token_cache = None + + with nest.Nest(args.user, args.password, access_token=args.token, 
+ access_token_cache_file=token_cache) as napi: + if cmd == 'away': + structure = None + + if args.structure: + struct = [s for s in napi.structures + if s.name == args.structure] + if struct: + structure = struct[0] + + else: + if args.serial: + serial = args.serial + else: + serial = napi.devices[args.index]._serial + + struct = [s for s in napi.structures for d in s.devices + if d._serial == serial] + if struct: + structure = struct[0] + + if not structure: + structure = napi.structures[0] + + if args.away: + structure.away = True + + elif args.home: + structure.away = False + + print(structure.away) + return + + if args.serial: + device = nest.Device(args.serial, napi) + + elif args.structure: + struct = [s for s in napi.structures if s.name == args.structure] + if struct: + device = struct[0].devices[args.index] + + else: + device = napi.structures[0].devices[args.index] + + else: + device = napi.devices[args.index] + + if cmd == 'temp': + if args.temperature: + if len(args.temperature) > 1: + if device.mode != 'range': + device.mode = 'range' + + lower = convert_temp(args.temperature[0]) + upper = convert_temp(args.temperature[1]) + device.temperature = (lower, upper) + + else: + temp = convert_temp(args.temperature[0]) + device.temperature = temp + + print('%0.1f' % display_temp(device.temperature)) + + elif cmd == 'fan': + if args.auto: + device.fan = False + + elif args.on: + device.fan = True + + print(device.fan) + + elif cmd == 'mode': + if args.cool: + device.mode = 'cool' + + elif args.heat: + device.mode = 'heat' + + elif args.range: + device.mode = 'range' + + elif args.off: + device.mode = 'off' + + print(device.mode) + + elif cmd == 'humid': + print(device.humidity) + + elif cmd == 'target_hum': + if args.humidity: + device.target_humidity = args.humidity[0] + + print(device.target_humidity) + + elif cmd == 'target': + target = device.target + + if isinstance(target, tuple): + print('Lower: %0.1f' % display_temp(target[0])) + print('Upper: %0.1f' 
% display_temp(target[1])) + + else: + print('%0.1f' % display_temp(target)) + + elif cmd == 'show': + data = device._shared.copy() + data.update(device._device) + + for k in sorted(data.keys()): + intag = any(intag in k for intag in ('temp', 'away', + 'threshold')) + nottag = any(notag in k for notag in ('type', 'pin_hash', + 'scale', 'enabled')) + if intag and not nottag: + try: + temp_data = '%0.1f' % display_temp(data[k]) + print(k + '.'*(35-len(k)) + ':', temp_data) + + except Exception: + print(k + '.'*(35-len(k)) + ':', data[k]) + + else: + print(k + '.'*(35-len(k)) + ':', data[k]) + + +if __name__ == '__main__': + main() diff --git a/deps/nest/helpers.py b/deps/nest/helpers.py new file mode 100644 index 00000000..72cd6625 --- /dev/null +++ b/deps/nest/helpers.py @@ -0,0 +1,66 @@ +# -*- coding:utf-8 -*- +# a module of helper functions +# mostly for the configuration + +import contextlib +import os + +from . import nest + +# use six for python2/python3 compatibility +from six.moves import configparser + + +class MissingCredentialsError(ValueError): + pass + + +def get_config(config_path=None, prog='nest'): + if not config_path: + config_path = os.path.sep.join(('~', '.config', prog, 'config')) + + defaults = {'celsius': False} + config_file = os.path.expanduser(config_path) + if os.path.exists(config_file): + config = configparser.SafeConfigParser() + config.read([config_file]) + if config.has_section('nest'): + defaults.update(dict(config.items('nest'))) + + return defaults + + +def get_auth_credentials(config_path=None): + config = get_config(config_path) + username = config.get('user') + password = config.get('password') + return username, password + + +@contextlib.contextmanager +def nest_login(config_path=None, username=None, password=None, **kwargs): + """ + This a context manager for creating a Nest object using + authentication credentials either provided as keyword arguments + or read from the configuration file. 
+ + :param config_path: Path to the config file. + The default is used if none is provided. + Optional if the the credentials are provided as arguments. + :param username: Optional if the config file contains the username. + :param password: Optional if the config file contains the password. + :param kwargs: Keyword arguments to pass onto the Nest initializer. + :return: Nest object + """ + + credentials_config = get_auth_credentials(config_path) + if not username: + username = credentials_config[0] + if not password: + password = credentials_config[1] + + if username and password: + yield nest.Nest(username, password, **kwargs) + else: + raise MissingCredentialsError( + 'The login credentials have not been provided.') diff --git a/deps/nest/nest.py b/deps/nest/nest.py new file mode 100644 index 00000000..221a8a39 --- /dev/null +++ b/deps/nest/nest.py @@ -0,0 +1,1043 @@ +# -*- coding:utf-8 -*- + +import collections +import copy +import datetime +import time +import os +import uuid +import weakref + +import requests +from requests import auth +from requests import adapters +from requests.compat import json +from requests import hooks + +try: + import pytz +except ImportError: + pytz = None + + +LOGIN_URL = 'https://home.nest.com/user/login' +AWAY_MAP = {'on': True, + 'away': True, + 'off': False, + 'home': False, + True: True, + False: False} +AZIMUTH_MAP = {'N': 0.0, 'NNE': 22.5, 'NE': 45.0, 'ENE': 67.5, 'E': 90.0, + 'ESE': 112.5, 'SE': 135.0, 'SSE': 157.5, 'S': 180.0, + 'SSW': 202.5, 'SW': 225.0, 'WSW': 247.5, 'W': 270.0, + 'WNW': 292.5, 'NW': 315.0, 'NNW': 337.5} + +AZIMUTH_ALIASES = (('North', 'N'), + ('North North East', 'NNE'), + ('North East', 'NE'), + ('North North East', 'NNE'), + ('East', 'E'), + ('East South East', 'ESE'), + ('South East', 'SE'), + ('South South East', 'SSE'), + ('South', 'S'), + ('South South West', 'SSW'), + ('South West', 'SW'), + ('West South West', 'WSW'), + ('West', 'W'), + ('West North West', 'WNW'), + ('North West', 'NW'), + 
('North North West', 'NNW')) + +for (alias, key) in AZIMUTH_ALIASES: + AZIMUTH_MAP[alias] = AZIMUTH_MAP[key] + +FAN_MAP = {'auto on': 'auto', + 'on': 'on', + 'auto': 'auto', + 'always on': 'on', + '1': 'on', + '0': 'auto', + 1: 'on', + 0: 'auto', + True: 'on', + False: 'auto'} + + +LowHighTuple = collections.namedtuple('LowHighTuple', ('low', 'high')) + + +class NestTZ(datetime.tzinfo): + def __init__(self, gmt_offset): + self._offset = datetime.timedelta(hours=float(gmt_offset)) + self._name = gmt_offset + + def __repr__(self): + return '<%s: gmt_offset=%s>' % (self.__class__.__name__, + self._name) + + def utcoffset(self, dt): + return self._offset + + def tzname(self, dt): + return self._name + + def dst(self, dt): + return datetime.timedelta(0) + + +class NestAuth(auth.AuthBase): + def __init__(self, username, password, auth_callback=None, session=None, + access_token=None, access_token_cache_file=None): + self._res = {} + self.username = username + self.password = password + self.auth_callback = auth_callback + self._access_token_cache_file = access_token_cache_file + + if (access_token_cache_file is not None and + access_token is None and + os.path.exists(access_token_cache_file)): + with open(access_token_cache_file, 'r') as f: + self._res = json.load(f) + self._callback(self._res) + + if session is not None: + session = weakref.ref(session) + + self._session = session + self._adapter = adapters.HTTPAdapter() + + def _cache(self): + if self._access_token_cache_file is not None: + with os.fdopen(os.open(self._access_token_cache_file, + os.O_WRONLY | os.O_CREAT, 0o600), + 'w') as f: + json.dump(self._res, f) + + def _callback(self, res): + if self.auth_callback is not None and isinstance(self.auth_callback, + collections.Callable): + self.auth_callback(self._res) + + def _login(self, headers=None): + data = {'username': self.username, 'password': self.password} + + post = requests.post + + if self._session: + session = self._session() + post = session.post + + 
response = post(LOGIN_URL, data=data, headers=headers) + response.raise_for_status() + self._res = response.json() + + self._cache() + self._callback(self._res) + + def _perhaps_relogin(self, r, **kwargs): + if r.status_code == 401: + self._login(r.headers.copy()) + req = r.request.copy() + req.hooks = hooks.default_hooks() + req.headers['Authorization'] = 'Basic ' + self.access_token + + adapter = self._adapter + if self._session: + session = self.session() + if session: + adapter = session.get_adapter(req.url) + + response = adapter.send(req, **kwargs) + response.history.append(r) + + return response + + return r + + @property + def access_token(self): + return self._res.get('access_token') + + @property + def urls(self): + if not self._res.get('urls'): + # NOTE(jkoelker) Bootstrap the URLs + self._login() + + return self._res.get('urls') + + @property + def user(self): + return self._res.get('user') + + def __call__(self, r): + if self.access_token: + r.headers['Authorization'] = 'Basic ' + self.access_token + + r.register_hook('response', self._perhaps_relogin) + return r + + +class Wind(object): + def __init__(self, direction=None, kph=None): + self.direction = direction + self.kph = kph + + @property + def azimuth(self): + return AZIMUTH_MAP[self.direction] + + +class Forecast(object): + def __init__(self, forecast, tz=None): + self._forecast = forecast + self._tz = tz + self.condition = forecast.get('condition') + self.humidity = forecast['humidity'] + self._icon = forecast.get('icon') + + fget = forecast.get + self._time = float(fget('observation_time', + fget('time', + fget('date', + fget('observation_epoch', + time.time()))))) + + def __repr__(self): + return '<%s: %s>' % (self.__class__.__name__, + self.datetime.strftime('%Y-%m-%d %H:%M:%S')) + + @property + def datetime(self): + return datetime.datetime.fromtimestamp(self._time, self._tz) + + @property + def temperature(self): + if 'temp_low_c' in self._forecast: + return 
class NestBase(object):
    """Common plumbing shared by Nest devices and structures.

    Subclasses identify themselves by a serial and talk to the API through
    the owning ``nest_api`` object.
    """

    def __init__(self, serial, nest_api, local_time=False):
        self._serial = serial
        self._nest_api = nest_api
        self._local_time = local_time

    def __repr__(self):
        return '<%s: %s>' % (type(self).__name__, self._repr_name)

    def _set(self, what, data):
        """POST *data* to the ``<what>.<serial>`` bucket, then bust the cache."""
        base = self._nest_api.urls['transport_url']
        url = '%s/v2/put/%s.%s' % (base, what, self._serial)
        response = self._nest_api._session.post(url, data=json.dumps(data))
        response.raise_for_status()
        self._nest_api._bust_cache()

    @property
    def _weather(self):
        # Weather is cached per "postal_code,country_code" key.
        lookup_key = self.postal_code + ',' + self.country_code
        return self._nest_api._weather[lookup_key]

    @property
    def weather(self):
        """Weather wrapper for this object's location."""
        return Weather(self._weather, self._local_time)

    @property
    def serial(self):
        return self._serial

    @property
    def name(self):
        # Default to the serial; subclasses override with a friendly name.
        return self._serial

    @property
    def _repr_name(self):
        return self.name
+ @property + def _shared(self): + return self._nest_api._status['shared'][self._serial] + + @property + def _link(self): + return self._nest_api._status['link'][self._serial] + + @property + def _track(self): + return self._nest_api._status['track'][self._serial] + + @property + def _repr_name(self): + if self.name: + return self.name + + return self.where + + @property + def structure(self): + return Structure(self._link['structure'].split('.')[-1], + self._nest_api, self._local_time) + + @property + def where(self): + if 'where_id' in self._device: + return self.structure.wheres[self._device['where_id']] + + @where.setter + def where(self, value): + value = value.lower() + ident = self.structure.wheres.get(value) + + if ident is None: + self.structure.add_where(value) + ident = self.structure.wheres[value] + + self._set('device', {'where_id': ident}) + + @property + def fan(self): + return self._shared['hvac_fan_state'] + + @fan.setter + def fan(self, value): + self._set('device', {'fan_mode': FAN_MAP.get(value, 'auto')}) + + @property + def humidity(self): + return self._device['current_humidity'] + + @property + def target_humidity(self): + return self._device['target_humidity'] + + @target_humidity.setter + def target_humidity(self, value): + if value == 'auto': + + if self._weather['current']['temp_c'] >= 4.44: + hum_value = 45 + elif self._weather['current']['temp_c'] >= -1.11: + hum_value = 40 + elif self._weather['current']['temp_c'] >= -6.67: + hum_value = 35 + elif self._weather['current']['temp_c'] >= -12.22: + hum_value = 30 + elif self._weather['current']['temp_c'] >= -17.78: + hum_value = 25 + elif self._weather['current']['temp_c'] >= -23.33: + hum_value = 20 + elif self._weather['current']['temp_c'] >= -28.89: + hum_value = 15 + elif self._weather['current']['temp_c'] >= -34.44: + hum_value = 10 + else: + hum_value = value + + if float(hum_value) != self._device['target_humidity']: + self._set('device', {'target_humidity': float(hum_value)}) + + 
    @property
    def mode(self):
        """Target temperature mode string from the shared bucket."""
        return self._shared['target_temperature_type']

    @mode.setter
    def mode(self, value):
        # The API expects a lower-cased mode string.
        self._set('shared', {'target_temperature_type': value.lower()})

    @property
    def name(self):
        """User-visible device name (shared bucket)."""
        return self._shared['name']

    @name.setter
    def name(self, value):
        self._set('shared', {'name': value})

    # Raw HVAC state flags passed straight through from the shared bucket.

    @property
    def hvac_ac_state(self):
        return self._shared['hvac_ac_state']

    @property
    def hvac_cool_x2_state(self):
        return self._shared['hvac_cool_x2_state']

    @property
    def hvac_heater_state(self):
        return self._shared['hvac_heater_state']

    @property
    def hvac_aux_heater_state(self):
        return self._shared['hvac_aux_heater_state']

    @property
    def hvac_heat_x2_state(self):
        return self._shared['hvac_heat_x2_state']

    @property
    def hvac_heat_x3_state(self):
        return self._shared['hvac_heat_x3_state']

    @property
    def hvac_alt_heat_state(self):
        return self._shared['hvac_alt_heat_state']

    @property
    def hvac_alt_heat_x2_state(self):
        return self._shared['hvac_alt_heat_x2_state']

    @property
    def hvac_emer_heat_state(self):
        return self._shared['hvac_emer_heat_state']

    @property
    def online(self):
        """Connectivity flag from the track bucket."""
        return self._track['online']

    @property
    def local_ip(self):
        return self._device['local_ip']

    @property
    def last_ip(self):
        return self._track['last_ip']

    @property
    def last_connection(self):
        return self._track['last_connection']

    @property
    def error_code(self):
        return self._device['error_code']

    @property
    def battery_level(self):
        return self._device['battery_level']

    @property
    def postal_code(self):
        return self._device['postal_code']

    @property
    def temperature(self):
        """Current measured temperature from the shared bucket."""
        return self._shared['current_temperature']

    @temperature.setter
    def temperature(self, value):
        # Assigning `temperature` is shorthand for assigning `target`.
        self.target = value
self._shared['target_temperature_high'] + return LowHighTuple(low, high) + + return self._shared['target_temperature'] + + @target.setter + def target(self, value): + data = {'target_change_pending': True} + + if self._shared['target_temperature_type'] == 'range': + data['target_temperature_low'] = value[0] + data['target_temperature_high'] = value[1] + + else: + data['target_temperature'] = value + + self._set('shared', data) + + @property + def away_temperature(self): + low = None + high = None + + if self._device['away_temperature_low_enabled']: + low = self._device['away_temperature_low'] + + if self._device['away_temperature_high_enabled']: + high = self._device['away_temperature_high'] + + return LowHighTuple(low, high) + + @away_temperature.setter + def away_temperature(self, value): + low, high = value + + data = {} + if low is not None: + data['away_temperature_low'] = low + data['away_temperature_low_enabled'] = True + + else: + data['away_temperature_low_enabled'] = False + + if high is not None: + data['away_temperature_high'] = high + data['away_temperature_high_enabled'] = True + + else: + data['away_temperature_high_enabled'] = False + + self._set('device', data) + + +class ProtectDevice(NestBase): + @property + def _device(self): + return self._nest_api._status['topaz'][self._serial] + + @property + def _repr_name(self): + if self.name: + return self.name + + return self.where + + @property + def structure(self): + return Structure(self._device['structure_id'], + self._nest_api, self._local_time) + + @property + def where(self): + if 'where_id' in self._device: + return self.structure.wheres[self._device['where_id']] + + @property + def auto_away(self): + return self._device['auto_away'] + + @property + def battery_health_state(self): + return self._device['battery_health_state'] + + @property + def battery_level(self): + return self._device['battery_level'] + + @property + def capability_level(self): + return self._device['capability_level'] + + 
@property + def certification_body(self): + return self._device['certification_body'] + + @property + def co_blame_duration(self): + if 'co_blame_duration' in self._device: + return self._device['co_blame_duration'] + + @property + def co_blame_threshold(self): + if 'co_blame_threshold' in self._device: + return self._device['co_blame_threshold'] + + @property + def co_previous_peak(self): + if 'co_previous_peak' in self._device: + return self._device['co_previous_peak'] + + @property + def co_sequence_number(self): + return self._device['co_sequence_number'] + + @property + def co_status(self): + return self._device['co_status'] + + @property + def component_als_test_passed(self): + return self._device['component_als_test_passed'] + + @property + def component_co_test_passed(self): + return self._device['component_co_test_passed'] + + @property + def component_heat_test_passed(self): + return self._device['component_heat_test_passed'] + + @property + def component_hum_test_passed(self): + return self._device['component_hum_test_passed'] + + @property + def component_led_test_passed(self): + return self._device['component_led_test_passed'] + + @property + def component_pir_test_passed(self): + return self._device['component_pir_test_passed'] + + @property + def component_smoke_test_passed(self): + return self._device['component_smoke_test_passed'] + + @property + def component_temp_test_passed(self): + return self._device['component_temp_test_passed'] + + @property + def component_us_test_passed(self): + return self._device['component_us_test_passed'] + + @property + def component_wifi_test_passed(self): + return self._device['component_wifi_test_passed'] + + @property + def creation_time(self): + return self._device['creation_time'] + + @property + def description(self): + return self._device['description'] + + @property + def device_external_color(self): + return self._device['device_external_color'] + + @property + def device_locale(self): + return 
self._device['device_locale'] + + @property + def fabric_id(self): + return self._device['fabric_id'] + + @property + def factory_loaded_languages(self): + return self._device['factory_loaded_languages'] + + @property + def gesture_hush_enable(self): + return self._device['gesture_hush_enable'] + + @property + def heads_up_enable(self): + return self._device['heads_up_enable'] + + @property + def home_alarm_link_capable(self): + return self._device['home_alarm_link_capable'] + + @property + def home_alarm_link_connected(self): + return self._device['home_alarm_link_connected'] + + @property + def home_alarm_link_type(self): + return self._device['home_alarm_link_type'] + + @property + def hushed_state(self): + return self._device['hushed_state'] + + @property + def installed_locale(self): + return self._device['installed_locale'] + + @property + def kl_software_version(self): + return self._device['kl_software_version'] + + @property + def latest_manual_test_cancelled(self): + return self._device['latest_manual_test_cancelled'] + + @property + def latest_manual_test_end_utc_secs(self): + return self._device['latest_manual_test_end_utc_secs'] + + @property + def latest_manual_test_start_utc_secs(self): + return self._device['latest_manual_test_start_utc_secs'] + + @property + def line_power_present(self): + return self._device['line_power_present'] + + @property + def night_light_continuous(self): + if 'night_light_continuous' in self._device: + return self._device['night_light_continuous'] + + @property + def night_light_enable(self): + return self._device['night_light_enable'] + + @property + def ntp_green_led_enable(self): + return self._device['ntp_green_led_enable'] + + @property + def product_id(self): + return self._device['product_id'] + + @property + def replace_by_date_utc_secs(self): + return self._device['replace_by_date_utc_secs'] + + @property + def resource_id(self): + return self._device['resource_id'] + + @property + def smoke_sequence_number(self): 
+ return self._device['smoke_sequence_number'] + + @property + def smoke_status(self): + return self._device['smoke_status'] + + @property + def software_version(self): + return self._device['software_version'] + + @property + def spoken_where_id(self): + return self._device['spoken_where_id'] + + @property + def steam_detection_enable(self): + return self._device['steam_detection_enable'] + + @property + def thread_mac_address(self): + return self._device['thread_mac_address'] + + @property + def where_id(self): + return self._device['where_id'] + + @property + def wifi_ip_address(self): + return self._device['wifi_ip_address'] + + @property + def wifi_mac_address(self): + return self._device['wifi_mac_address'] + + @property + def wifi_regulatory_domain(self): + return self._device['wifi_regulatory_domain'] + + @property + def wired_led_enable(self): + return self._device['wired_led_enable'] + + @property + def wired_or_battery(self): + return self._device['wired_or_battery'] + + +class Structure(NestBase): + @property + def _structure(self): + return self._nest_api._status['structure'][self._serial] + + def _set_away(self, value, auto_away=False): + self._set('structure', {'away': AWAY_MAP[value], + 'away_timestamp': int(time.time()), + 'away_setter': int(auto_away)}) + + @property + def away(self): + return self._structure['away'] + + @away.setter + def away(self, value): + self._set_away(value) + + @property + def country_code(self): + return self._structure['country_code'] + + @property + def devices(self): + return [Device(devid.split('.')[-1], self._nest_api, + self._local_time) + for devid in self._structure.get('devices', [])] + + @property + def protectdevices(self): + return [ProtectDevice(topazid.split('.')[-1], self._nest_api, + self._local_time) + for topazid in self._nest_api._status.get('topaz', [])] + + @property + def dr_reminder_enabled(self): + return self._structure['dr_reminder_enabled'] + + @property + def 
emergency_contact_description(self): + return self._structure['emergency_contact_description'] + + @property + def emergency_contact_type(self): + return self._structure['emergency_contact_type'] + + @property + def emergency_contact_phone(self): + return self._structure['emergency_contact_phone'] + + @property + def enhanced_auto_away_enabled(self): + return self._structure['topaz_enhanced_auto_away_enabled'] + + @property + def eta_preconditioning_active(self): + return self._structure['eta_preconditioning_active'] + + @property + def house_type(self): + return self._structure['house_type'] + + @property + def hvac_safety_shutoff_enabled(self): + return self._structure['hvac_safety_shutoff_enabled'] + + @property + def name(self): + return self._structure['name'] + + @name.setter + def name(self, value): + self._set('structure', {'name': value}) + + @property + def location(self): + return self._structure.get('location') + + @property + def address(self): + return self._structure.get('street_address') + + @property + def num_thermostats(self): + return self._structure['num_thermostats'] + + @property + def measurement_scale(self): + return self._structure['measurement_scale'] + + @property + def postal_code(self): + return self._structure['postal_code'] + + @property + def renovation_date(self): + return self._structure['renovation_date'] + + @property + def structure_area(self): + return self._structure['structure_area'] + + @property + def time_zone(self): + return self._structure['time_zone'] + + @property + def _wheres(self): + return self._nest_api._status['where'][self._serial]['wheres'] + + @property + def wheres(self): + ret = {w['name'].lower(): w['where_id'] for w in self._wheres} + ret.update({v: k for k, v in ret.items()}) + return ret + + @wheres.setter + def wheres(self, value): + self._set('where', {'wheres': value}) + + def add_where(self, name, ident=None): + name = name.lower() + + if name in self.wheres: + return self.wheres[name] + + name = ' 
class WeatherCache(object):
    """Per-postal-code cache in front of the Nest weather endpoint.

    Entries are refetched once they are older than *cache_ttl* seconds.
    """

    def __init__(self, nest_api, cache_ttl=270):
        self._nest_api = nest_api
        self._cache_ttl = cache_ttl
        self._cache = {}

    def __getitem__(self, postal_code):
        now = time.time()
        cached = self._cache.get(postal_code)

        if cached is not None:
            value, stamp = cached
            # Serve from cache only while the entry is fresh and non-empty.
            if value and now - stamp <= self._cache_ttl:
                return value

        url = self._nest_api.urls['weather_url'] + postal_code
        response = self._nest_api._session.get(url)
        response.raise_for_status()
        value = response.json()[postal_code]
        self._cache[postal_code] = (value, now)

        return value
CELSIUS = 'C'
FAHRENHEIT = 'F'

# Exact Decimal constants so the conversions round-trip without binary
# floating-point drift: 1.8 (= 9/5) and 10.764 (ft^2 per m^2).
_THIRTYTWO = decimal.Decimal(32)
_ONEPOINTEIGHT = decimal.Decimal(18) / decimal.Decimal(10)
_TENPOINTSEVENSIXFOUR = decimal.Decimal(10764) / decimal.Decimal(1000)


def f_to_c(temp):
    """Convert degrees Fahrenheit to degrees Celsius (returned as float)."""
    return float((decimal.Decimal(temp) - _THIRTYTWO) / _ONEPOINTEIGHT)


def c_to_f(temp):
    """Convert degrees Celsius to degrees Fahrenheit (returned as float)."""
    return float(decimal.Decimal(temp) * _ONEPOINTEIGHT + _THIRTYTWO)
def m2_to_ft2(area):
    """Convert square metres to square feet (returned as float)."""
    # _TENPOINTSEVENSIXFOUR is the module's exact Decimal for 10.764.
    return float(decimal.Decimal(area) * _TENPOINTSEVENSIXFOUR)
+netdisco/service.py,sha256=RWuBF8DArsdli1ixx0HKR-cs8pYHQyHR5SEa1WaM38c,2596 +netdisco/ssdp.py,sha256=uB2jlGXHrAwJJTBggMlomgdntTJ9mRfz7Q2lQE9mjCk,8137 +netdisco/tellstick.py,sha256=rfeMH3TkO0UPd3JxuDDW-rY1X8SZ4vm0XeYibsV0CrE,1815 +netdisco/util.py,sha256=QHyald4Tb7jUnTaMNudXTOyl5iEeDMqx2Xo0XGN9NmY,1550 +netdisco/discoverables/__init__.py,sha256=zoCYXrQZRV6FWEHK_HSfusXTHWV3HINcnvFKatpD89o,4229 +netdisco/discoverables/belkin_wemo.py,sha256=Uv5rXXByd_mfVISQE0OF2CwcLZLF72UazY7NLXXdv3o,674 +netdisco/discoverables/directv.py,sha256=gYSm2k1NHae_7onNMQNmM-5rItJutBEAt86RcxxGCFk,693 +netdisco/discoverables/DLNA.py,sha256=y2iDXg6Y5ud9QPlhtTJaf3Ga32QlBqKHgiqaQrkHlNc,352 +netdisco/discoverables/google_cast.py,sha256=azO0WAyXupFOp0rTqfdG93vJUP_jae3ODkrLSiXHQeg,392 +netdisco/discoverables/home_assistant.py,sha256=3zfL6qNeNCNwt35sCC8ssviW4VLT82do3rUHYJm1HJM,778 +netdisco/discoverables/homekit.py,sha256=TfLxTkC32ogCAbsx02LXJeh_MmZYug34cT0bZz__qpY,763 +netdisco/discoverables/kodi.py,sha256=MkLoAku2TQPzLa61Dhn1lPDSCPh4cpiTkicz802q3eM,679 +netdisco/discoverables/logitech_mediaserver.py,sha256=6K1wpSAumPo1Y-FkPSaf4y1XcAyUs0nybHhFw42KcgQ,443 +netdisco/discoverables/mystrom.py,sha256=36zh2VCViZF5jWeISZZ4aEgKF8BHp05L6GS0NMndJBw,740 +netdisco/discoverables/netgear_router.py,sha256=_B25XtDS1O1oaADuNIuHsac1GQWUk3A8mlHVk6GGtY0,681 +netdisco/discoverables/panasonic_viera.py,sha256=FcRfKNZ0Dfi9GfXh8c1zuNcptPwyMiZ4XRJX84SMpHw,625 +netdisco/discoverables/philips_hue.py,sha256=dchaQ_OC6hKPAZTp94Lu40LQccNHiOY9EhKqnTUY_gk,660 +netdisco/discoverables/plex_mediaserver.py,sha256=3DMHxaB25hnGRUADKF4O7xdCntFqt-zdf3msbRzZSNk,510 +netdisco/discoverables/roku.py,sha256=jPv9V49Fxq-FQFCieeInvHXKJMp6Kj282zQ_cN_8oSo,487 +netdisco/discoverables/sabnzbd.py,sha256=w8je2QCzzUp58Z3N6OBD84HNMSXGe4ZNbEUhrNPsX5o,750 +netdisco/discoverables/sonos.py,sha256=RUl_CIfYvIbZyuOd7ky9Lhj1gHlXjN1_1P08qxBR5xw,548 +netdisco/discoverables/tellstick.py,sha256=ojbb5K8o0JkistksNA3QKn2rRcQfKyaUxTeikGMGgkk,395 
+netdisco/discoverables/webos_tv.py,sha256=SoHjM7OPI21pLFCUJoGQwtbyMyTpBl8MDVmLdYofTCE,663 +netdisco-0.7.1.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10 +netdisco-0.7.1.dist-info/METADATA,sha256=GBUVKd9JMezBpPAcRbtplqtkCxNQN7WKQqxlKN_mdxo,364 +netdisco-0.7.1.dist-info/metadata.json,sha256=q8nv0cBEyFLe87EZG-mcT1imV72nGJfnv_BsubUqYgs,552 +netdisco-0.7.1.dist-info/RECORD,, +netdisco-0.7.1.dist-info/top_level.txt,sha256=quRtQ_fhb5skqxQOtPi0jHhLTts9emXn_98P_rRl9TY,9 +netdisco-0.7.1.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 +netdisco-0.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +netdisco/discoverables/__pycache__/mystrom.cpython-34.pyc,, +netdisco/discoverables/__pycache__/plex_mediaserver.cpython-34.pyc,, +netdisco/__pycache__/__init__.cpython-34.pyc,, +netdisco/discoverables/__pycache__/home_assistant.cpython-34.pyc,, +netdisco/discoverables/__pycache__/webos_tv.cpython-34.pyc,, +netdisco/__pycache__/util.cpython-34.pyc,, +netdisco/__pycache__/__main__.cpython-34.pyc,, +netdisco/discoverables/__pycache__/sonos.cpython-34.pyc,, +netdisco/discoverables/__pycache__/tellstick.cpython-34.pyc,, +netdisco/__pycache__/mdns.cpython-34.pyc,, +netdisco/discoverables/__pycache__/belkin_wemo.cpython-34.pyc,, +netdisco/__pycache__/tellstick.cpython-34.pyc,, +netdisco/discoverables/__pycache__/sabnzbd.cpython-34.pyc,, +netdisco/__pycache__/lms.cpython-34.pyc,, +netdisco/discoverables/__pycache__/directv.cpython-34.pyc,, +netdisco/discoverables/__pycache__/kodi.cpython-34.pyc,, +netdisco/__pycache__/const.cpython-34.pyc,, +netdisco/discoverables/__pycache__/homekit.cpython-34.pyc,, +netdisco/__pycache__/gdm.cpython-34.pyc,, +netdisco/discoverables/__pycache__/__init__.cpython-34.pyc,, +netdisco/__pycache__/discovery.cpython-34.pyc,, +netdisco/discoverables/__pycache__/google_cast.cpython-34.pyc,, +netdisco/discoverables/__pycache__/philips_hue.cpython-34.pyc,, 
+netdisco/__pycache__/ssdp.cpython-34.pyc,, +netdisco/discoverables/__pycache__/DLNA.cpython-34.pyc,, +netdisco/discoverables/__pycache__/panasonic_viera.cpython-34.pyc,, +netdisco/discoverables/__pycache__/logitech_mediaserver.cpython-34.pyc,, +netdisco/discoverables/__pycache__/netgear_router.cpython-34.pyc,, +netdisco/discoverables/__pycache__/roku.cpython-34.pyc,, +netdisco/__pycache__/service.cpython-34.pyc,, diff --git a/deps/netdisco-0.7.1.dist-info/WHEEL b/deps/netdisco-0.7.1.dist-info/WHEEL new file mode 100644 index 00000000..9dff69d8 --- /dev/null +++ b/deps/netdisco-0.7.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/deps/netdisco-0.7.1.dist-info/metadata.json b/deps/netdisco-0.7.1.dist-info/metadata.json new file mode 100644 index 00000000..c127b535 --- /dev/null +++ b/deps/netdisco-0.7.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"run_requires": [{"requires": ["netifaces (>=0.10.0)", "requests (>=2.0)", "zeroconf (==0.17.6)"]}], "extras": [], "summary": "Discover devices on your local network", "generator": "bdist_wheel (0.24.0)", "metadata_version": "2.0", "extensions": {"python.details": {"contacts": [{"email": "Paulus@PaulusSchoutsen.nl", "role": "author", "name": "Paulus Schoutsen"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/home-assistant/netdisco"}}}, "name": "netdisco", "version": "0.7.1", "license": "MIT"} \ No newline at end of file diff --git a/deps/netdisco-0.7.1.dist-info/top_level.txt b/deps/netdisco-0.7.1.dist-info/top_level.txt new file mode 100644 index 00000000..dfc05501 --- /dev/null +++ b/deps/netdisco-0.7.1.dist-info/top_level.txt @@ -0,0 +1 @@ +netdisco diff --git a/deps/netdisco/__init__.py b/deps/netdisco/__init__.py new file mode 100644 index 00000000..7b4bb4cb --- /dev/null +++ b/deps/netdisco/__init__.py @@ -0,0 +1 @@ +"""Module to scan the network using uPnP 
def main():
    """Scan the network and print discovered devices.

    A trailing ``dump`` command line argument prints the raw scan data
    before the device summary.
    """
    discovery = NetworkDiscovery()
    discovery.scan()

    if sys.argv[-1] == 'dump':
        discovery.print_raw_data()
        print()
        print()

    print("Discovered devices:")
    count = 0
    for count, device in enumerate(discovery.discover(), start=1):
        print(device, discovery.get_info(device))
    print()
    print("Discovered {} devices".format(count))

    discovery.stop()
00000000..404aaacd Binary files /dev/null and b/deps/netdisco/__pycache__/gdm.cpython-34.pyc differ diff --git a/deps/netdisco/__pycache__/lms.cpython-34.pyc b/deps/netdisco/__pycache__/lms.cpython-34.pyc new file mode 100644 index 00000000..76a19e2e Binary files /dev/null and b/deps/netdisco/__pycache__/lms.cpython-34.pyc differ diff --git a/deps/netdisco/__pycache__/mdns.cpython-34.pyc b/deps/netdisco/__pycache__/mdns.cpython-34.pyc new file mode 100644 index 00000000..debb576c Binary files /dev/null and b/deps/netdisco/__pycache__/mdns.cpython-34.pyc differ diff --git a/deps/netdisco/__pycache__/service.cpython-34.pyc b/deps/netdisco/__pycache__/service.cpython-34.pyc new file mode 100644 index 00000000..32a9b8e6 Binary files /dev/null and b/deps/netdisco/__pycache__/service.cpython-34.pyc differ diff --git a/deps/netdisco/__pycache__/ssdp.cpython-34.pyc b/deps/netdisco/__pycache__/ssdp.cpython-34.pyc new file mode 100644 index 00000000..a2e2b0fa Binary files /dev/null and b/deps/netdisco/__pycache__/ssdp.cpython-34.pyc differ diff --git a/deps/netdisco/__pycache__/tellstick.cpython-34.pyc b/deps/netdisco/__pycache__/tellstick.cpython-34.pyc new file mode 100644 index 00000000..2d133e57 Binary files /dev/null and b/deps/netdisco/__pycache__/tellstick.cpython-34.pyc differ diff --git a/deps/netdisco/__pycache__/util.cpython-34.pyc b/deps/netdisco/__pycache__/util.cpython-34.pyc new file mode 100644 index 00000000..4335cfdf Binary files /dev/null and b/deps/netdisco/__pycache__/util.cpython-34.pyc differ diff --git a/deps/netdisco/const.py b/deps/netdisco/const.py new file mode 100644 index 00000000..2ce0f413 --- /dev/null +++ b/deps/netdisco/const.py @@ -0,0 +1,15 @@ +"""Constants of services that can be discovered.""" + +BELKIN_WEMO = "belkin_wemo" +DLNA = "DLNA" +GOOGLE_CAST = "google_cast" +PHILIPS_HUE = "philips_hue" +PMS = 'plex_mediaserver' +LMS = 'logitech_mediaserver' +NETGEAR_ROUTER = "netgear_router" +SONOS = "sonos" +PANASONIC_VIERA = "panasonic_viera" 
+SABNZBD = 'sabnzbd' +KODI = 'kodi' +HOME_ASSISTANT = "home_assistant" +MYSTROM = 'mystrom' diff --git a/deps/netdisco/discoverables/DLNA.py b/deps/netdisco/discoverables/DLNA.py new file mode 100644 index 00000000..a6530877 --- /dev/null +++ b/deps/netdisco/discoverables/DLNA.py @@ -0,0 +1,11 @@ +"""Discover DLNA services.""" +from . import SSDPDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(SSDPDiscoverable): + """Add support for discovering DLNA services.""" + + def get_entries(self): + """Get all the DLNA service uPnP entries.""" + return self.find_by_st("urn:schemas-upnp-org:device:MediaServer:1") diff --git a/deps/netdisco/discoverables/__init__.py b/deps/netdisco/discoverables/__init__.py new file mode 100644 index 00000000..d510a901 --- /dev/null +++ b/deps/netdisco/discoverables/__init__.py @@ -0,0 +1,133 @@ +"""Provides helpful stuff for discoverables.""" +# pylint: disable=abstract-method + + +class BaseDiscoverable(object): + """Base class for discoverable services or device types.""" + + def is_discovered(self): + """Return True if it is discovered.""" + return len(self.get_entries()) > 0 + + def get_info(self): + """Return a list with the important info for each item. + + Uses self.info_from_entry internally. 
+ """ + return [self.info_from_entry(entry) for entry in self.get_entries()] + + # pylint: disable=no-self-use + def info_from_entry(self, entry): + """Return an object with important info from the entry.""" + return entry + + # pylint: disable=no-self-use + def get_entries(self): + """Return all the discovered entries.""" + raise NotImplementedError() + + +class SSDPDiscoverable(BaseDiscoverable): + """uPnP discoverable base class.""" + + def __init__(self, netdis): + """Initialize SSDPDiscoverable.""" + self.netdis = netdis + + def get_info(self): + """Get most important info, by default the description location.""" + return list(set( + self.info_from_entry(entry) for entry in self.get_entries())) + + def info_from_entry(self, entry): + """Get most important info, by default the description location.""" + return entry.values['location'] + + # Helper functions + + # pylint: disable=invalid-name + def find_by_st(self, st): + """Find entries by ST (the device identifier).""" + return self.netdis.ssdp.find_by_st(st) + + def find_by_device_description(self, values): + """Find entries based on values from their description.""" + return self.netdis.ssdp.find_by_device_description(values) + + +class MDNSDiscoverable(BaseDiscoverable): + """mDNS Discoverable base class.""" + + def __init__(self, netdis, typ): + """Initialize MDNSDiscoverable.""" + self.netdis = netdis + self.typ = typ + self.services = {} + + netdis.mdns.register_service(self) + + def reset(self): + """Reset found services.""" + self.services.clear() + + def is_discovered(self): + """Return True if any device has been discovered.""" + return len(self.services) > 0 + + # pylint: disable=unused-argument + def remove_service(self, zconf, typ, name): + """Callback when a service is removed.""" + self.services.pop(name, None) + + def add_service(self, zconf, typ, name): + """Callback when a service is found.""" + service = None + tries = 0 + while service is None and tries < 3: + service = 
zconf.get_service_info(typ, name) + tries += 1 + + if service is not None: + self.services[name] = service + + def get_entries(self): + """Return all found services.""" + return self.services.values() + + def info_from_entry(self, entry): + """Return most important info from mDNS entries.""" + return (self.ip_from_host(entry.server), entry.port) + + def ip_from_host(self, host): + """Attempt to return the ip address from an mDNS host. + + Return host if failed. + """ + ips = self.netdis.mdns.zeroconf.cache.entries_with_name(host.lower()) + + return repr(ips[0]) if ips else host + + +class GDMDiscoverable(BaseDiscoverable): + """GDM discoverable base class.""" + + def __init__(self, netdis): + """Initialize GDMDiscoverable.""" + self.netdis = netdis + + def get_info(self): + """Get most important info, by default the description location.""" + return [self.info_from_entry(entry) for entry in self.get_entries()] + + def info_from_entry(self, entry): + """Get most important info, by default the description location.""" + return 'https://%s:%s/' % (entry.values['location'], + entry.values['port']) + + def find_by_content_type(self, value): + """Find entries based on values from their content_type.""" + return self.netdis.gdm.find_by_content_type(value) + + def find_by_data(self, values): + """Find entries based on values from any returned field.""" + return self.netdis.gdm.find_by_data(values) diff --git a/deps/netdisco/discoverables/__pycache__/DLNA.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/DLNA.cpython-34.pyc new file mode 100644 index 00000000..5a6b9ae7 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/DLNA.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/__init__.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..6c040695 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/__init__.cpython-34.pyc differ diff --git 
a/deps/netdisco/discoverables/__pycache__/belkin_wemo.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/belkin_wemo.cpython-34.pyc new file mode 100644 index 00000000..ee38e974 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/belkin_wemo.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/directv.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/directv.cpython-34.pyc new file mode 100644 index 00000000..5af90f77 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/directv.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/google_cast.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/google_cast.cpython-34.pyc new file mode 100644 index 00000000..a6bb0bca Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/google_cast.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/home_assistant.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/home_assistant.cpython-34.pyc new file mode 100644 index 00000000..d355f3f0 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/home_assistant.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/homekit.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/homekit.cpython-34.pyc new file mode 100644 index 00000000..bb21f8c6 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/homekit.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/kodi.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/kodi.cpython-34.pyc new file mode 100644 index 00000000..6cc7a3a6 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/kodi.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/logitech_mediaserver.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/logitech_mediaserver.cpython-34.pyc new file mode 100644 index 00000000..28208853 Binary files /dev/null and 
b/deps/netdisco/discoverables/__pycache__/logitech_mediaserver.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/mystrom.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/mystrom.cpython-34.pyc new file mode 100644 index 00000000..ade509ef Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/mystrom.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/netgear_router.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/netgear_router.cpython-34.pyc new file mode 100644 index 00000000..7965ff55 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/netgear_router.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/panasonic_viera.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/panasonic_viera.cpython-34.pyc new file mode 100644 index 00000000..2b7c330a Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/panasonic_viera.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/philips_hue.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/philips_hue.cpython-34.pyc new file mode 100644 index 00000000..dc261a88 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/philips_hue.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/plex_mediaserver.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/plex_mediaserver.cpython-34.pyc new file mode 100644 index 00000000..07f3e87e Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/plex_mediaserver.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/roku.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/roku.cpython-34.pyc new file mode 100644 index 00000000..1cc581e0 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/roku.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/sabnzbd.cpython-34.pyc 
b/deps/netdisco/discoverables/__pycache__/sabnzbd.cpython-34.pyc new file mode 100644 index 00000000..d6a11b9a Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/sabnzbd.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/sonos.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/sonos.cpython-34.pyc new file mode 100644 index 00000000..32965359 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/sonos.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/tellstick.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/tellstick.cpython-34.pyc new file mode 100644 index 00000000..e831f461 Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/tellstick.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/__pycache__/webos_tv.cpython-34.pyc b/deps/netdisco/discoverables/__pycache__/webos_tv.cpython-34.pyc new file mode 100644 index 00000000..13627e7b Binary files /dev/null and b/deps/netdisco/discoverables/__pycache__/webos_tv.cpython-34.pyc differ diff --git a/deps/netdisco/discoverables/belkin_wemo.py b/deps/netdisco/discoverables/belkin_wemo.py new file mode 100644 index 00000000..6dd81855 --- /dev/null +++ b/deps/netdisco/discoverables/belkin_wemo.py @@ -0,0 +1,19 @@ +"""Discover Belkin Wemo devices.""" +from . 
import SSDPDiscoverable + + +class Discoverable(SSDPDiscoverable): + """Add support for discovering Belkin WeMo platform devices.""" + + def info_from_entry(self, entry): + """Return most important info from a uPnP entry.""" + device = entry.description['device'] + + return (device['friendlyName'], device['modelName'], + entry.values['location'], device.get('macAddress', ''), + device['serialNumber']) + + def get_entries(self): + """Return all Belkin Wemo entries.""" + return self.find_by_device_description( + {'manufacturer': 'Belkin International Inc.'}) diff --git a/deps/netdisco/discoverables/directv.py b/deps/netdisco/discoverables/directv.py new file mode 100644 index 00000000..1423eaec --- /dev/null +++ b/deps/netdisco/discoverables/directv.py @@ -0,0 +1,22 @@ +"""Discover DirecTV Receivers.""" +from netdisco.util import urlparse +from . import SSDPDiscoverable + + +class Discoverable(SSDPDiscoverable): + """Add support for discovering DirecTV Receivers.""" + + def info_from_entry(self, entry): + """Return the most important info from a uPnP entry.""" + url = urlparse(entry.values['location']) + + device = entry.description['device'] + + return url.hostname, device['serialNumber'] + + def get_entries(self): + """Get all the DirecTV uPnP entries.""" + return self.find_by_device_description({ + "manufacturer": "DIRECTV", + "deviceType": "urn:schemas-upnp-org:device:MediaServer:1" + }) diff --git a/deps/netdisco/discoverables/google_cast.py b/deps/netdisco/discoverables/google_cast.py new file mode 100644 index 00000000..6355fd83 --- /dev/null +++ b/deps/netdisco/discoverables/google_cast.py @@ -0,0 +1,11 @@ +"""Discover devices that implement the Google Cast platform.""" +from . 
import MDNSDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(MDNSDiscoverable): + """Add support for discovering Google Cast platform devices.""" + + def __init__(self, nd): + """Initialize the Cast discovery.""" + super(Discoverable, self).__init__(nd, '_googlecast._tcp.local.') diff --git a/deps/netdisco/discoverables/home_assistant.py b/deps/netdisco/discoverables/home_assistant.py new file mode 100644 index 00000000..8736f9e2 --- /dev/null +++ b/deps/netdisco/discoverables/home_assistant.py @@ -0,0 +1,20 @@ +"""Discover Home Assistant servers.""" +from . import MDNSDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(MDNSDiscoverable): + """Add support for discovering Home Assistant instances.""" + + def __init__(self, nd): + super(Discoverable, self).__init__(nd, '_home-assistant._tcp.local.') + + def info_from_entry(self, entry): + """Returns most important info from mDNS entries.""" + return (entry.properties.get(b'base_url').decode('utf-8'), + entry.properties.get(b'version').decode('utf-8'), + entry.properties.get(b'requires_api_password')) + + def get_info(self): + """Get details from Home Assistant instances.""" + return [self.info_from_entry(entry) for entry in self.get_entries()] diff --git a/deps/netdisco/discoverables/homekit.py b/deps/netdisco/discoverables/homekit.py new file mode 100644 index 00000000..ebd86539 --- /dev/null +++ b/deps/netdisco/discoverables/homekit.py @@ -0,0 +1,21 @@ +"""Discover myStrom devices.""" +from . 
import MDNSDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(MDNSDiscoverable): + """Add support for discovering myStrom switches.""" + + def __init__(self, nd): + super(Discoverable, self).__init__(nd, '_hap._tcp.local.') + + def info_from_entry(self, entry): + """Return the most important info from mDNS entries.""" + info = {key.decode('utf-8'): value.decode('utf-8') + for key, value in entry.properties.items()} + info['host'] = 'http://{}'.format(self.ip_from_host(entry.server)) + return info + + def get_info(self): + """Get details from myStrom devices.""" + return [self.info_from_entry(entry) for entry in self.get_entries()] diff --git a/deps/netdisco/discoverables/kodi.py b/deps/netdisco/discoverables/kodi.py new file mode 100644 index 00000000..86ce7309 --- /dev/null +++ b/deps/netdisco/discoverables/kodi.py @@ -0,0 +1,20 @@ +"""Discover Kodi servers.""" +from . import MDNSDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(MDNSDiscoverable): + """Add support for discovering Kodi.""" + + def __init__(self, nd): + """Initialize the Kodi discovery.""" + super(Discoverable, self).__init__(nd, '_http._tcp.local.') + + def info_from_entry(self, entry): + """Return most important info from mDNS entries.""" + return (self.ip_from_host(entry.server), entry.port) + + def get_info(self): + """Get all the Kodi details.""" + return [self.info_from_entry(entry) for entry in self.get_entries() + if entry.name.startswith('Kodi ')] diff --git a/deps/netdisco/discoverables/logitech_mediaserver.py b/deps/netdisco/discoverables/logitech_mediaserver.py new file mode 100644 index 00000000..ea4c9804 --- /dev/null +++ b/deps/netdisco/discoverables/logitech_mediaserver.py @@ -0,0 +1,14 @@ +"""Discover Logitech Media Server.""" +from . 
import BaseDiscoverable + + +class Discoverable(BaseDiscoverable): + """Add support for discovering Logitech Media Server.""" + + def __init__(self, netdis): + """Initialize Logitech Media Server discovery.""" + self.netdis = netdis + + def get_entries(self): + """Get all the Logitech Media Server details.""" + return [entry['from'] for entry in self.netdis.lms.entries] diff --git a/deps/netdisco/discoverables/mystrom.py b/deps/netdisco/discoverables/mystrom.py new file mode 100644 index 00000000..50acf018 --- /dev/null +++ b/deps/netdisco/discoverables/mystrom.py @@ -0,0 +1,20 @@ +"""Discover myStrom devices.""" +from . import MDNSDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(MDNSDiscoverable): + """Add support for discovering myStrom switches.""" + + def __init__(self, nd): + super(Discoverable, self).__init__(nd, '_hap._tcp.local.') + + def info_from_entry(self, entry): + """Return the most important info from mDNS entries.""" + return (entry.properties.get(b'md').decode('utf-8'), + 'http://{}'.format(self.ip_from_host(entry.server)), + entry.properties.get(b'id').decode('utf-8')) + + def get_info(self): + """Get details from myStrom devices.""" + return [self.info_from_entry(entry) for entry in self.get_entries()] diff --git a/deps/netdisco/discoverables/netgear_router.py b/deps/netdisco/discoverables/netgear_router.py new file mode 100644 index 00000000..a0e955ed --- /dev/null +++ b/deps/netdisco/discoverables/netgear_router.py @@ -0,0 +1,20 @@ +"""Discover Netgear routers.""" +from netdisco.util import urlparse +from . 
import SSDPDiscoverable + + +class Discoverable(SSDPDiscoverable): + """Add support for discovering Netgear routers.""" + + def info_from_entry(self, entry): + """Return the most important info from a uPnP entry.""" + url = urlparse(entry.values['location']) + + return (entry.description['device']['modelNumber'], url.hostname) + + def get_entries(self): + """Get all the Netgear uPnP entries.""" + return self.find_by_device_description({ + "manufacturer": "NETGEAR, Inc.", + "deviceType": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" + }) diff --git a/deps/netdisco/discoverables/panasonic_viera.py b/deps/netdisco/discoverables/panasonic_viera.py new file mode 100644 index 00000000..b98b7da0 --- /dev/null +++ b/deps/netdisco/discoverables/panasonic_viera.py @@ -0,0 +1,17 @@ +"""Discover Panasonic Viera TV devices.""" +from netdisco.util import urlparse +from . import SSDPDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(SSDPDiscoverable): + """Add support for discovering Viera TV devices.""" + + def info_from_entry(self, entry): + """Return the most important info from a uPnP entry.""" + parsed = urlparse(entry.values['location']) + return '{}:{}'.format(parsed.hostname, parsed.port) + + def get_entries(self): + """Get all the Viera TV device uPnP entries.""" + return self.find_by_st("urn:panasonic-com:service:p00NetworkControl:1") diff --git a/deps/netdisco/discoverables/philips_hue.py b/deps/netdisco/discoverables/philips_hue.py new file mode 100644 index 00000000..062d4693 --- /dev/null +++ b/deps/netdisco/discoverables/philips_hue.py @@ -0,0 +1,20 @@ +"""Discover Philips Hue bridges.""" +from . 
import SSDPDiscoverable + + +class Discoverable(SSDPDiscoverable): + """Add support for discovering Philips Hue bridges.""" + + def info_from_entry(self, entry): + """Return the most important info from a uPnP entry.""" + desc = entry.description + + return desc['device']['friendlyName'], desc['URLBase'] + + def get_entries(self): + """Get all the Hue bridge uPnP entries.""" + # Hub models for year 2012 and 2015 + return self.find_by_device_description({ + "manufacturer": "Royal Philips Electronics", + "modelNumber": ["929000226503", "BSB002"] + }) diff --git a/deps/netdisco/discoverables/plex_mediaserver.py b/deps/netdisco/discoverables/plex_mediaserver.py new file mode 100644 index 00000000..5d9030c3 --- /dev/null +++ b/deps/netdisco/discoverables/plex_mediaserver.py @@ -0,0 +1,15 @@ +"""Discover PlexMediaServer.""" +from . import GDMDiscoverable + + +class Discoverable(GDMDiscoverable): + """Add support for discovering Plex Media Server.""" + + def info_from_entry(self, entry): + """Return most important info from a GDM entry.""" + return (entry['data']['Name'], + 'https://%s:%s' % (entry['from'][0], entry['data']['Port'])) + + def get_entries(self): + """Return all PMS entries.""" + return self.find_by_data({'Content-Type': 'plex/media-server'}) diff --git a/deps/netdisco/discoverables/roku.py b/deps/netdisco/discoverables/roku.py new file mode 100644 index 00000000..38724e95 --- /dev/null +++ b/deps/netdisco/discoverables/roku.py @@ -0,0 +1,16 @@ +"""Discover Roku players.""" +from netdisco.util import urlparse +from . 
import SSDPDiscoverable + + +class Discoverable(SSDPDiscoverable): + """Add support for discovering Roku media players.""" + + def info_from_entry(self, entry): + """Return the most important info from a uPnP entry.""" + info = urlparse(entry.location) + return info.hostname, info.port + + def get_entries(self): + """Get all the Roku entries.""" + return self.find_by_st("roku:ecp") diff --git a/deps/netdisco/discoverables/sabnzbd.py b/deps/netdisco/discoverables/sabnzbd.py new file mode 100644 index 00000000..65b7ae12 --- /dev/null +++ b/deps/netdisco/discoverables/sabnzbd.py @@ -0,0 +1,21 @@ +"""Discover SABnzbd servers.""" +from . import MDNSDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(MDNSDiscoverable): + """Add support for discovering SABnzbd.""" + + def __init__(self, nd): + """Initialize the SABnzbd discovery.""" + super(Discoverable, self).__init__(nd, '_http._tcp.local.') + + def info_from_entry(self, entry): + """Return most important info from mDNS entries.""" + return (self.ip_from_host(entry.server), entry.port, + entry.properties.get('path', '/sabnzbd/')) + + def get_info(self): + """Get details of SABnzbd.""" + return [self.info_from_entry(entry) for entry in self.get_entries() + if entry.name.startswith('SABnzbd on')] diff --git a/deps/netdisco/discoverables/sonos.py b/deps/netdisco/discoverables/sonos.py new file mode 100644 index 00000000..c30ac587 --- /dev/null +++ b/deps/netdisco/discoverables/sonos.py @@ -0,0 +1,16 @@ +"""Discover Sonos devices.""" +from netdisco.util import urlparse +from . 
import SSDPDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(SSDPDiscoverable): + """Add support for discovering Sonos devices.""" + + def info_from_entry(self, entry): + """Return the most important info from a uPnP entry.""" + return urlparse(entry.values['location']).hostname + + def get_entries(self): + """Get all the Sonos device uPnP entries.""" + return self.find_by_st("urn:schemas-upnp-org:device:ZonePlayer:1") diff --git a/deps/netdisco/discoverables/tellstick.py b/deps/netdisco/discoverables/tellstick.py new file mode 100644 index 00000000..727aa762 --- /dev/null +++ b/deps/netdisco/discoverables/tellstick.py @@ -0,0 +1,14 @@ +"""Discover Tellstick devices.""" +from . import BaseDiscoverable + + +class Discoverable(BaseDiscoverable): + """Add support for discovering a Tellstick device.""" + + def __init__(self, netdis): + """Initialize the Tellstick discovery.""" + self._netdis = netdis + + def get_entries(self): + """Get all the Tellstick details.""" + return self._netdis.tellstick.entries diff --git a/deps/netdisco/discoverables/webos_tv.py b/deps/netdisco/discoverables/webos_tv.py new file mode 100644 index 00000000..d1df7c62 --- /dev/null +++ b/deps/netdisco/discoverables/webos_tv.py @@ -0,0 +1,19 @@ +"""Discover LG WebOS TV devices.""" +from netdisco.util import urlparse +from . 
import SSDPDiscoverable + + +# pylint: disable=too-few-public-methods +class Discoverable(SSDPDiscoverable): + """Add support for discovering LG WebOS TV devices.""" + + def info_from_entry(self, entry): + """Return the most important info from a uPnP entry.""" + return urlparse(entry.values['location']).hostname + + def get_entries(self): + """Get all the LG WebOS TV device uPnP entries.""" + return self.find_by_device_description({ + "deviceType": "urn:dial-multiscreen-org:device:dial:1", + "friendlyName": "[LG] webOS TV" + }) diff --git a/deps/netdisco/discovery.py b/deps/netdisco/discovery.py new file mode 100644 index 00000000..c49f4bf1 --- /dev/null +++ b/deps/netdisco/discovery.py @@ -0,0 +1,126 @@ +"""Combine all the different protocols into a simple interface.""" +from __future__ import print_function +import logging +import os +import importlib + +from .ssdp import SSDP +from .mdns import MDNS +from .gdm import GDM +from .lms import LMS +from .tellstick import Tellstick + +_LOGGER = logging.getLogger(__name__) + + +class NetworkDiscovery(object): + """Scan the network for devices. + + mDNS scans in a background thread. + SSDP scans in the foreground. + GDM scans in the foreground. + LMS scans in the foreground. 
+ Tellstick scans in the foreground + + start: is ready to scan + scan: scan the network + discover: parse scanned data + get_in + """ + + # pylint: disable=too-many-instance-attributes + def __init__(self, limit_discovery=None): + """Initialize the discovery.""" + self.limit_discovery = limit_discovery + + self.mdns = MDNS() + self.ssdp = SSDP() + self.gdm = GDM() + self.lms = LMS() + self.tellstick = Tellstick() + self.discoverables = {} + + self._load_device_support() + + self.is_discovering = False + + def scan(self): + """Start and tells scanners to scan.""" + if not self.is_discovering: + self.mdns.start() + self.is_discovering = True + + self.ssdp.scan() + self.gdm.scan() + self.lms.scan() + self.tellstick.scan() + + def stop(self): + """Turn discovery off.""" + if not self.is_discovering: + return + + self.mdns.stop() + + self.is_discovering = False + + def discover(self): + """Return a list of discovered devices and services.""" + self._check_enabled() + + return [dis for dis, checker in self.discoverables.items() + if checker.is_discovered()] + + def get_info(self, dis): + """Get a list with the most important info about discovered type.""" + return self.discoverables[dis].get_info() + + def get_entries(self, dis): + """Get a list with all info about a discovered type.""" + return self.discoverables[dis].get_entries() + + def _check_enabled(self): + """Raise RuntimeError if discovery is disabled.""" + if not self.is_discovering: + raise RuntimeError("NetworkDiscovery is disabled") + + def _load_device_support(self): + """Load the devices and services that can be discovered.""" + self.discoverables = {} + + discoverables_format = __name__.rsplit('.', 1)[0] + '.discoverables.{}' + + for module_name in os.listdir(os.path.join(os.path.dirname(__file__), + 'discoverables')): + if module_name[-3:] != '.py' or module_name == '__init__.py': + continue + + module_name = module_name[:-3] + + if self.limit_discovery is not None and \ + module_name not in 
self.limit_discovery: + continue + + module = importlib.import_module( + discoverables_format.format(module_name)) + + self.discoverables[module_name] = module.Discoverable(self) + + def print_raw_data(self): + """Helper method to show what is discovered in your network.""" + from pprint import pprint + + print("Zeroconf") + pprint(self.mdns.entries) + print("") + print("SSDP") + pprint(self.ssdp.entries) + print("") + print("GDM") + pprint(self.gdm.entries) + print("") + print("LMS") + pprint(self.lms.entries) + print("") + print("Tellstick") + pprint(self.tellstick.entries) diff --git a/deps/netdisco/gdm.py b/deps/netdisco/gdm.py new file mode 100644 index 00000000..aa7355df --- /dev/null +++ b/deps/netdisco/gdm.py @@ -0,0 +1,110 @@ +""" +Support for discovery using GDM (Good Day Mate), multicast protocol by Plex. + +Inspired by + hippojay's plexGDM: + https://github.com/hippojay/script.plexbmc.helper/resources/lib/plexgdm.py + iBaa's PlexConnect: https://github.com/iBaa/PlexConnect/PlexAPI.py +""" +import threading +import socket + + +class GDM(object): + """Base class to discover GDM services.""" + + def __init__(self): + self.entries = [] + self.last_scan = None + self._lock = threading.RLock() + + def scan(self): + """Scan the network.""" + with self._lock: + self.update() + + def all(self): + """Return all found entries. + + Will scan for entries if not scanned recently. + """ + self.scan() + return list(self.entries) + + def find_by_content_type(self, value): + """Return a list of entries that match the content_type.""" + self.scan() + return [entry for entry in self.entries + if value in entry['data']['Content_Type']] + + def find_by_data(self, values): + """Return a list of entries that match the search parameters.""" + self.scan() + return [entry for entry in self.entries + if all(item in entry['data'].items() + for item in values.items())] + + def update(self): + """Scan for new GDM services. 
+ + Example of the dict list returned by this function: + [{'data': 'Content-Type: plex/media-server' + 'Host: 53f4b5b6023d41182fe88a99b0e714ba.plex.direct' + 'Name: myfirstplexserver' + 'Port: 32400' + 'Resource-Identifier: 646ab0aa8a01c543e94ba975f6fd6efadc36b7' + 'Updated-At: 1444852697' + 'Version: 0.9.12.13.1464-4ccd2ca' + 'from': ('10.10.10.100', 32414)}] + """ + + gdm_ip = '239.0.0.250' # multicast to PMS + gdm_port = 32414 + gdm_msg = 'M-SEARCH * HTTP/1.0'.encode('ascii') + gdm_timeout = 1 + + self.entries = [] + + # setup socket for discovery -> multicast message + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.settimeout(gdm_timeout) + + # Set the time-to-live for messages for local network + sock.setsockopt(socket.IPPROTO_IP, + socket.IP_MULTICAST_TTL, + gdm_timeout) + + try: + # Send data to the multicast group + sock.sendto(gdm_msg, (gdm_ip, gdm_port)) + + # Look for responses from all recipients + while True: + try: + data, server = sock.recvfrom(1024) + data = data.decode('utf-8') + if '200 OK' in data.splitlines()[0]: + data = {k: v.strip() for (k, v) in ( + line.split(':') for line in + data.splitlines() if ':' in line)} + self.entries.append({'data': data, + 'from': server}) + except socket.timeout: + break + finally: + sock.close() + + +def main(): + """Test GDM discovery.""" + # pylint: disable=invalid-name + from pprint import pprint + + gdm = GDM() + + pprint("Scanning GDM...") + gdm.update() + pprint(gdm.entries) + +if __name__ == "__main__": + main() diff --git a/deps/netdisco/lms.py b/deps/netdisco/lms.py new file mode 100644 index 00000000..166ad6c4 --- /dev/null +++ b/deps/netdisco/lms.py @@ -0,0 +1,70 @@ +"""Squeezebox/Logitech Media server discovery.""" +import socket +import threading + +DISCOVERY_PORT = 3483 +DEFAULT_DISCOVERY_TIMEOUT = 5 + + +class LMS(object): + """Base class to discover Logitech Media servers.""" + + def __init__(self): + """Initialize the Logitech discovery.""" + self.entries = [] + self.last_scan 
= None + self._lock = threading.RLock() + + def scan(self): + """Scan the network.""" + with self._lock: + self.update() + + def all(self): + """Scan and return all found entries.""" + self.scan() + return list(self.entries) + + def update(self): + """Scan network for Logitech Media Servers.""" + lms_ip = '' + lms_port = DISCOVERY_PORT + lms_msg = b"d................." + lms_timeout = DEFAULT_DISCOVERY_TIMEOUT + + entries = [] + + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + sock.settimeout(lms_timeout) + sock.bind(('', 0)) + + try: + sock.sendto(lms_msg, (lms_ip, lms_port)) + + while True: + try: + data, server = sock.recvfrom(1024) + if data.startswith(b'D'): + entries.append({'data': data, + 'from': server}) + except socket.timeout: + break + finally: + sock.close() + self.entries = entries + + +def main(): + """Test LMS discovery.""" + from pprint import pprint + + # pylint: disable=invalid-name + lms = LMS() + + pprint("Scanning for Logitech Media Servers...") + lms.update() + pprint(lms.entries) + +if __name__ == "__main__": + main() diff --git a/deps/netdisco/mdns.py b/deps/netdisco/mdns.py new file mode 100644 index 00000000..ae638212 --- /dev/null +++ b/deps/netdisco/mdns.py @@ -0,0 +1,40 @@ +"""Add support for discovering mDNS services.""" +import zeroconf + + +class MDNS(object): + """Base class to discover mDNS services.""" + + def __init__(self): + """Initialize the discovery.""" + self.zeroconf = None + self.services = [] + self._browsers = [] + + def register_service(self, service): + """Register a mDNS service.""" + self.services.append(service) + + def start(self): + """Start discovery.""" + self.zeroconf = zeroconf.Zeroconf() + + for service in self.services: + self._browsers.append( + zeroconf.ServiceBrowser(self.zeroconf, service.typ, service)) + + def stop(self): + """Stop discovering.""" + while self._browsers: + self._browsers.pop().cancel() + + for service in 
DEFAULT_INTERVAL = 300  # seconds between two scan passes

_LOGGER = logging.getLogger(__name__)


class DiscoveryService(threading.Thread):
    """Daemon thread that scans the network every `interval` seconds.

    Register callables via :meth:`add_listener`; each is invoked as
    ``listener(discoverable, service)`` the first time a service is seen.
    """

    def __init__(self, interval=DEFAULT_INTERVAL, limit_discovery=None):
        """Initialize the discovery service.

        interval: seconds to wait between two scan passes.
        limit_discovery: optional collection of discovery types to
            restrict scanning to (passed through to NetworkDiscovery).
        """
        super(DiscoveryService, self).__init__()

        # Scanning interval in seconds.
        self.interval = interval

        # Limit discovery to the given types (None = no limit).
        self.limit_discovery = limit_discovery

        # Listeners notified of newly found services.
        self.listeners = []

        # BUGFIX: named `_stop_event`, not `_stop` -- threading.Thread
        # defines an internal `_stop()` method, and shadowing it with an
        # Event object breaks thread shutdown/join on CPython 3.
        self._stop_event = threading.Event()

        # Daemon thread: do not block interpreter exit.
        self.daemon = True

        # NetworkDiscovery instance, created lazily in run().
        self.discovery = None

        # Services already reported, keyed by discoverable type. We do
        # not want to broadcast the same found service twice.
        self._found = defaultdict(list)

    def add_listener(self, listener):
        """Register a callable to be notified of new services."""
        self.listeners.append(listener)

    def stop(self):
        """Signal the scan loop to exit after the current sleep tick."""
        self._stop_event.set()

    def run(self):
        """Scan, then sleep `interval` seconds, until stopped."""
        self.discovery = NetworkDiscovery(self.limit_discovery)

        while True:
            self._scan()

            # Sleep in one-second ticks so stop() is honored promptly.
            seconds_since_scan = 0
            while seconds_since_scan < self.interval:
                if self._stop_event.is_set():
                    return

                time.sleep(1)
                seconds_since_scan += 1

    def _scan(self):
        """Run one discovery pass and report anything new."""
        _LOGGER.info("Scanning")
        self.discovery.scan()

        for disc in self.discovery.discover():
            for service in self.discovery.get_info(disc):
                self._service_found(disc, service)

        self.discovery.stop()

    def _service_found(self, disc, service):
        """Notify listeners about `service` unless it was seen before."""
        if service not in self._found[disc]:
            self._found[disc].append(service)

            for listener in self.listeners:
                try:
                    listener(disc, service)
                except Exception:  # pylint: disable=broad-except
                    # A broken listener must not kill the scan thread.
                    _LOGGER.exception("Error calling listener")
__init__(self): + """Initialize the discovery.""" + self.entries = [] + self.last_scan = None + self._lock = threading.RLock() + + def scan(self): + """Scan the network.""" + with self._lock: + self.update() + + def all(self): + """Return all found entries. + + Will scan for entries if not scanned recently. + """ + with self._lock: + self.update() + + return list(self.entries) + + # pylint: disable=invalid-name + def find_by_st(self, st): + """Return a list of entries that match the ST.""" + with self._lock: + self.update() + + return [entry for entry in self.entries + if entry.st == st] + + def find_by_device_description(self, values): + """Return a list of entries that match the description. + + Pass in a dict with values to match against the device tag in the + description. + """ + with self._lock: + self.update() + + return [entry for entry in self.entries + if entry.match_device_description(values)] + + def update(self, force_update=False): + """Scan for new uPnP devices and services.""" + with self._lock: + if self.last_scan is None or force_update or \ + datetime.now()-self.last_scan > MIN_TIME_BETWEEN_SCANS: + + self.remove_expired() + + # Wemo does not respond to a query for all devices+services + # but only to a query for just root devices. 
class UPNPEntry(object):
    """A single uPnP device/service discovered via SSDP.

    `values` holds the lower-cased headers of the discovery response
    (e.g. 'st', 'location', 'cache-control').
    """

    # Shared cache of fetched description XML, keyed by location URL.
    # '_NO_LOCATION' is the fallback key for entries without a location.
    DESCRIPTION_CACHE = {'_NO_LOCATION': {}}

    def __init__(self, values):
        """Initialize from a dict of lower-cased SSDP response headers."""
        self.values = values
        self.created = datetime.now()

        if 'cache-control' in self.values:
            # Header looks like 'max-age=1800': seconds this
            # advertisement stays valid.
            cache_seconds = int(self.values['cache-control'].split('=')[1])

            self.expires = self.created + timedelta(seconds=cache_seconds)
        else:
            self.expires = None

    @property
    def is_expired(self):
        """Return True if the advertisement's max-age has elapsed."""
        return self.expires is not None and datetime.now() > self.expires

    # pylint: disable=invalid-name
    @property
    def st(self):
        """Return the ST (search target / service type) header value."""
        return self.values.get('st')

    @property
    def location(self):
        """Return the Location header value (description URL)."""
        return self.values.get('location')

    @property
    def description(self):
        """Fetch, parse, and cache the device description XML as a dict."""
        url = self.values.get('location', '_NO_LOCATION')

        if url not in UPNPEntry.DESCRIPTION_CACHE:
            try:
                xml = requests.get(url).text

                tree = ElementTree.fromstring(xml)

                UPNPEntry.DESCRIPTION_CACHE[url] = \
                    etree_to_dict(tree).get('root', {})
            except requests.RequestException:
                logging.getLogger(__name__).error(
                    "Error fetching description at %s", url)

                # Cache the failure so we don't hammer a dead URL.
                UPNPEntry.DESCRIPTION_CACHE[url] = {}

            except ElementTree.ParseError:
                logging.getLogger(__name__).error(
                    "Found malformed XML at %s: %s", url, xml)

                UPNPEntry.DESCRIPTION_CACHE[url] = {}

        return UPNPEntry.DESCRIPTION_CACHE[url]

    def match_device_description(self, values):
        """Fetch the description and match `values` against its device tag.

        Values should only contain lowercase keys. A list value matches
        if the device's value is contained in it; otherwise equality is
        required.
        """
        device = self.description.get('device')

        if device is None:
            return False

        return all(device.get(key) in val
                   if isinstance(val, list)
                   else val == device.get(key)
                   for key, val in values.items())

    @classmethod
    def from_response(cls, response):
        """Create a uPnP entry from a raw SSDP response string."""
        return UPNPEntry({key.lower(): item for key, item
                          in RESPONSE_REGEX.findall(response)})

    def __eq__(self, other):
        """Entries are equal when their header dicts are equal."""
        return (self.__class__ == other.__class__ and
                self.values == other.values)

    def __repr__(self):
        """Return a debug representation of the entry.

        BUGFIX: the format string was empty (``"".format(...)``), so every
        entry printed as ''; include the ST and location fields.
        """
        return "<UPNPEntry {} - {}>".format(
            self.values.get('st', ''), self.values.get('location', ''))
DISCOVERY_PORT = 30303
DISCOVERY_ADDRESS = ''
DISCOVERY_PAYLOAD = b"D"
DISCOVERY_TIMEOUT = timedelta(seconds=5)


class Tellstick(object):
    """Base class to discover Tellstick devices."""

    def __init__(self):
        """Initialize the Tellstick discovery."""
        self.entries = []
        self._lock = threading.RLock()

    def scan(self):
        """Run one discovery pass while holding the instance lock."""
        with self._lock:
            self.update()

    def all(self):
        """Scan and return all found entries."""
        self.scan()
        return self.entries

    def update(self):
        """Scan network for Tellstick devices.

        Broadcasts a single-byte probe and collects replies until the
        socket times out.
        """
        entries = []

        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.settimeout(DISCOVERY_TIMEOUT.seconds)
            sock.sendto(DISCOVERY_PAYLOAD,
                        (DISCOVERY_ADDRESS, DISCOVERY_PORT))

            while True:
                try:
                    data, (address, _) = sock.recvfrom(1024)
                except socket.timeout:
                    break

                try:
                    fields = data.decode("ascii").split(":")
                except UnicodeDecodeError:
                    # Ignore garbage replies instead of aborting the scan.
                    continue

                # Expecting product, mac, activation code, version.
                if len(fields) != 4:
                    continue
                entries.append((address,) + tuple(fields))
        finally:
            # BUGFIX: close in a finally block; the original only closed
            # after a clean timeout, leaking the socket on any other error.
            sock.close()

        self.entries = entries
+ """ + # pylint: disable=no-member + return [addr['addr'] + for i in netifaces.interfaces() + for addr in netifaces.ifaddresses(i).get(family) or [] + if 'broadcast' in addr] diff --git a/deps/netifaces-0.10.5.dist-info/DESCRIPTION.rst b/deps/netifaces-0.10.5.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..3b79ac60 --- /dev/null +++ b/deps/netifaces-0.10.5.dist-info/DESCRIPTION.rst @@ -0,0 +1,196 @@ +netifaces 0.10.4 +================ + +.. image:: https://drone.io/bitbucket.org/al45tair/netifaces/status.png + :target: https://drone.io/bitbucket.org/al45tair/netifaces/latest + :alt: Build Status + +1. What is this? +---------------- + +It's been annoying me for some time that there's no easy way to get the +address(es) of the machine's network interfaces from Python. There is +a good reason for this difficulty, which is that it is virtually impossible +to do so in a portable manner. However, it seems to me that there should +be a package you can easy_install that will take care of working out the +details of doing so on the machine you're using, then you can get on with +writing Python code without concerning yourself with the nitty gritty of +system-dependent low-level networking APIs. + +This package attempts to solve that problem. + +2. How do I use it? +------------------- + +First you need to install it, which you can do by typing:: + + tar xvzf netifaces-0.10.4.tar.gz + cd netifaces-0.10.4 + python setup.py install + +Once that's done, you'll need to start Python and do something like the +following:: + +>>> import netifaces + +Then if you enter + +>>> netifaces.interfaces() +['lo0', 'gif0', 'stf0', 'en0', 'en1', 'fw0'] + +you'll see the list of interface identifiers for your machine. 
+ +You can ask for the addresses of a particular interface by doing + +>>> netifaces.ifaddresses('lo0') +{18: [{'addr': ''}], 2: [{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}], 30: [{'peer': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', 'addr': '::1'}, {'peer': '', 'netmask': 'ffff:ffff:ffff:ffff::', 'addr': 'fe80::1%lo0'}]} + +Hmmmm. That result looks a bit cryptic; let's break it apart and explain +what each piece means. It returned a dictionary, so let's look there first:: + + { 18: [...], 2: [...], 30: [...] } + +Each of the numbers refers to a particular address family. In this case, we +have three address families listed; on my system, 18 is ``AF_LINK`` (which means +the link layer interface, e.g. Ethernet), 2 is ``AF_INET`` (normal Internet +addresses), and 30 is ``AF_INET6`` (IPv6). + +But wait! Don't use these numbers in your code. The numeric values here are +system dependent; fortunately, I thought of that when writing netifaces, so +the module declares a range of values that you might need. e.g. + +>>> netifaces.AF_LINK +18 + +Again, on your system, the number may be different. + +So, what we've established is that the dictionary that's returned has one +entry for each address family for which this interface has an address. Let's +take a look at the ``AF_INET`` addresses now: + +>>> addrs = netifaces.ifaddresses('lo0') +>>> addrs[netifaces.AF_INET] +[{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}] + +You might be wondering why this value is a list. The reason is that it's +possible for an interface to have more than one address, even within the +same family. I'll say that again: *you can have more than one address of +the same type associated with each interface*. 
+ +*Asking for "the" address of a particular interface doesn't make sense.* + +Right, so, we can see that this particular interface only has one address, +and, because it's a loopback interface, it's point-to-point and therefore +has a *peer* address rather than a broadcast address. + +Let's look at a more interesting interface. + +>>> addrs = netifaces.ifaddresses('en0') +>>> addrs[netifaces.AF_INET] +[{'broadcast': '10.15.255.255', 'netmask': '255.240.0.0', 'addr': '10.0.1.4'}, {'broadcast': '192.168.0.255', 'addr': '192.168.0.47'}] + +This interface has two addresses (see, I told you...) Both of them are +regular IPv4 addresses, although in one case the netmask has been changed +from its default. The netmask *may not* appear on your system if it's set +to the default for the address range. + +Because this interface isn't point-to-point, it also has broadcast addresses. + +Now, say we want, instead of the IP addresses, to get the MAC address; that +is, the hardware address of the Ethernet adapter running this interface. We +can do + +>>> addrs[netifaces.AF_LINK] +[{'addr': '00:12:34:56:78:9a'}] + +Note that this may not be available on platforms without getifaddrs(), unless +they happen to implement ``SIOCGIFHWADDR``. Note also that you just get the +address; it's unlikely that you'll see anything else with an ``AF_LINK`` address. +Oh, and don't assume that all ``AF_LINK`` addresses are Ethernet; you might, for +instance, be on a Mac, in which case: + +>>> addrs = netifaces.ifaddresses('fw0') +>>> addrs[netifaces.AF_LINK] +[{'addr': '00:12:34:56:78:9a:bc:de'}] + +No, that isn't an exceptionally long Ethernet MAC address---it's a FireWire +address. 
+ +As of version 0.10.0, you can also obtain a list of gateways on your +machine: + +>>> netifaces.gateways() +{2: [('10.0.1.1', 'en0', True), ('10.2.1.1', 'en1', False)], 30: [('fe80::1', 'en0', True)], 'default': { 2: ('10.0.1.1', 'en0'), 30: ('fe80::1', 'en0') }} + +This dictionary is keyed on address family---in this case, ``AF_INET``---and +each entry is a list of gateways as ``(address, interface, is_default)`` tuples. +Notice that here we have two separate gateways for IPv4 (``AF_INET``); some +operating systems support configurations like this and can either route packets +based on their source, or based on administratively configured routing tables. + +For convenience, we also allow you to index the dictionary with the special +value ``'default'``, which returns a dictionary mapping address families to the +default gateway in each case. Thus you can get the default IPv4 gateway with + +>>> gws = netifaces.gateways() +>>> gws['default'][netifaces.AF_INET] +('10.0.1.1', 'en0') + +Do note that there may be no default gateway for any given address family; +this is currently very common for IPv6 and much less common for IPv4 but it +can happen even for ``AF_INET``. + +BTW, if you're trying to configure your machine to have multiple gateways for +the same address family, it's a very good idea to check the documentation for +your operating system *very* carefully, as some systems become extremely +confused or route packets in a non-obvious manner. + +I'm very interested in hearing from anyone (on any platform) for whom the +``gateways()`` method doesn't produce the expected results. It's quite +complicated extracting this information from the operating system (whichever +operating system we're talking about), and so I expect there's at least one +system out there where this just won't work. + +3. This is great! What platforms does it work on? +-------------------------------------------------- + +It gets regular testing on OS X, Linux and Windows. 
It has also been used +successfully on Solaris, and it's expected to work properly on other UNIX-like +systems as well. If you are running something that is not supported, and +wish to contribute a patch, please use BitBucket to send a pull request. + +4. What license is this under? +------------------------------ + +It's an MIT-style license. Here goes: + +Copyright (c) 2007-2014 Alastair Houghton + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +5. Why the jump to 0.10.0? +-------------------------- + +Because someone released a fork of netifaces with the version 0.9.0. +Hopefully skipping the version number should remove any confusion. In +addition starting with 0.10.0 Python 3 is now supported and other +features/bugfixes have been included as well. See the CHANGELOG for a +more complete list of changes. 
+ + diff --git a/deps/netifaces-0.10.5.dist-info/INSTALLER b/deps/netifaces-0.10.5.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/netifaces-0.10.5.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/netifaces-0.10.5.dist-info/METADATA b/deps/netifaces-0.10.5.dist-info/METADATA new file mode 100644 index 00000000..63752701 --- /dev/null +++ b/deps/netifaces-0.10.5.dist-info/METADATA @@ -0,0 +1,216 @@ +Metadata-Version: 2.0 +Name: netifaces +Version: 0.10.5 +Summary: Portable network interface information. +Home-page: https://bitbucket.org/al45tair/netifaces +Author: Alastair Houghton +Author-email: alastair@alastairs-place.net +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: System :: Networking +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 + +netifaces 0.10.4 +================ + +.. image:: https://drone.io/bitbucket.org/al45tair/netifaces/status.png + :target: https://drone.io/bitbucket.org/al45tair/netifaces/latest + :alt: Build Status + +1. What is this? +---------------- + +It's been annoying me for some time that there's no easy way to get the +address(es) of the machine's network interfaces from Python. There is +a good reason for this difficulty, which is that it is virtually impossible +to do so in a portable manner. 
However, it seems to me that there should +be a package you can easy_install that will take care of working out the +details of doing so on the machine you're using, then you can get on with +writing Python code without concerning yourself with the nitty gritty of +system-dependent low-level networking APIs. + +This package attempts to solve that problem. + +2. How do I use it? +------------------- + +First you need to install it, which you can do by typing:: + + tar xvzf netifaces-0.10.4.tar.gz + cd netifaces-0.10.4 + python setup.py install + +Once that's done, you'll need to start Python and do something like the +following:: + +>>> import netifaces + +Then if you enter + +>>> netifaces.interfaces() +['lo0', 'gif0', 'stf0', 'en0', 'en1', 'fw0'] + +you'll see the list of interface identifiers for your machine. + +You can ask for the addresses of a particular interface by doing + +>>> netifaces.ifaddresses('lo0') +{18: [{'addr': ''}], 2: [{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}], 30: [{'peer': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', 'addr': '::1'}, {'peer': '', 'netmask': 'ffff:ffff:ffff:ffff::', 'addr': 'fe80::1%lo0'}]} + +Hmmmm. That result looks a bit cryptic; let's break it apart and explain +what each piece means. It returned a dictionary, so let's look there first:: + + { 18: [...], 2: [...], 30: [...] } + +Each of the numbers refers to a particular address family. In this case, we +have three address families listed; on my system, 18 is ``AF_LINK`` (which means +the link layer interface, e.g. Ethernet), 2 is ``AF_INET`` (normal Internet +addresses), and 30 is ``AF_INET6`` (IPv6). + +But wait! Don't use these numbers in your code. The numeric values here are +system dependent; fortunately, I thought of that when writing netifaces, so +the module declares a range of values that you might need. e.g. + +>>> netifaces.AF_LINK +18 + +Again, on your system, the number may be different. 
+ +So, what we've established is that the dictionary that's returned has one +entry for each address family for which this interface has an address. Let's +take a look at the ``AF_INET`` addresses now: + +>>> addrs = netifaces.ifaddresses('lo0') +>>> addrs[netifaces.AF_INET] +[{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}] + +You might be wondering why this value is a list. The reason is that it's +possible for an interface to have more than one address, even within the +same family. I'll say that again: *you can have more than one address of +the same type associated with each interface*. + +*Asking for "the" address of a particular interface doesn't make sense.* + +Right, so, we can see that this particular interface only has one address, +and, because it's a loopback interface, it's point-to-point and therefore +has a *peer* address rather than a broadcast address. + +Let's look at a more interesting interface. + +>>> addrs = netifaces.ifaddresses('en0') +>>> addrs[netifaces.AF_INET] +[{'broadcast': '10.15.255.255', 'netmask': '255.240.0.0', 'addr': '10.0.1.4'}, {'broadcast': '192.168.0.255', 'addr': '192.168.0.47'}] + +This interface has two addresses (see, I told you...) Both of them are +regular IPv4 addresses, although in one case the netmask has been changed +from its default. The netmask *may not* appear on your system if it's set +to the default for the address range. + +Because this interface isn't point-to-point, it also has broadcast addresses. + +Now, say we want, instead of the IP addresses, to get the MAC address; that +is, the hardware address of the Ethernet adapter running this interface. We +can do + +>>> addrs[netifaces.AF_LINK] +[{'addr': '00:12:34:56:78:9a'}] + +Note that this may not be available on platforms without getifaddrs(), unless +they happen to implement ``SIOCGIFHWADDR``. Note also that you just get the +address; it's unlikely that you'll see anything else with an ``AF_LINK`` address. 
+Oh, and don't assume that all ``AF_LINK`` addresses are Ethernet; you might, for +instance, be on a Mac, in which case: + +>>> addrs = netifaces.ifaddresses('fw0') +>>> addrs[netifaces.AF_LINK] +[{'addr': '00:12:34:56:78:9a:bc:de'}] + +No, that isn't an exceptionally long Ethernet MAC address---it's a FireWire +address. + +As of version 0.10.0, you can also obtain a list of gateways on your +machine: + +>>> netifaces.gateways() +{2: [('10.0.1.1', 'en0', True), ('10.2.1.1', 'en1', False)], 30: [('fe80::1', 'en0', True)], 'default': { 2: ('10.0.1.1', 'en0'), 30: ('fe80::1', 'en0') }} + +This dictionary is keyed on address family---in this case, ``AF_INET``---and +each entry is a list of gateways as ``(address, interface, is_default)`` tuples. +Notice that here we have two separate gateways for IPv4 (``AF_INET``); some +operating systems support configurations like this and can either route packets +based on their source, or based on administratively configured routing tables. + +For convenience, we also allow you to index the dictionary with the special +value ``'default'``, which returns a dictionary mapping address families to the +default gateway in each case. Thus you can get the default IPv4 gateway with + +>>> gws = netifaces.gateways() +>>> gws['default'][netifaces.AF_INET] +('10.0.1.1', 'en0') + +Do note that there may be no default gateway for any given address family; +this is currently very common for IPv6 and much less common for IPv4 but it +can happen even for ``AF_INET``. + +BTW, if you're trying to configure your machine to have multiple gateways for +the same address family, it's a very good idea to check the documentation for +your operating system *very* carefully, as some systems become extremely +confused or route packets in a non-obvious manner. + +I'm very interested in hearing from anyone (on any platform) for whom the +``gateways()`` method doesn't produce the expected results. 
It's quite +complicated extracting this information from the operating system (whichever +operating system we're talking about), and so I expect there's at least one +system out there where this just won't work. + +3. This is great! What platforms does it work on? +-------------------------------------------------- + +It gets regular testing on OS X, Linux and Windows. It has also been used +successfully on Solaris, and it's expected to work properly on other UNIX-like +systems as well. If you are running something that is not supported, and +wish to contribute a patch, please use BitBucket to send a pull request. + +4. What license is this under? +------------------------------ + +It's an MIT-style license. Here goes: + +Copyright (c) 2007-2014 Alastair Houghton + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +5. Why the jump to 0.10.0? +-------------------------- + +Because someone released a fork of netifaces with the version 0.9.0. 
+Hopefully skipping the version number should remove any confusion. In +addition starting with 0.10.0 Python 3 is now supported and other +features/bugfixes have been included as well. See the CHANGELOG for a +more complete list of changes. + + diff --git a/deps/netifaces-0.10.5.dist-info/RECORD b/deps/netifaces-0.10.5.dist-info/RECORD new file mode 100644 index 00000000..64ebb90f --- /dev/null +++ b/deps/netifaces-0.10.5.dist-info/RECORD @@ -0,0 +1,9 @@ +netifaces.cpython-34m.so,sha256=h6vTQiZljfNCsn4tfZ9cCKyB3D4IPNYutDyDc7lPLMU,58292 +netifaces-0.10.5.dist-info/DESCRIPTION.rst,sha256=EyJf6yFbUVkw5TpZ0M61ZeVbtCZdHYAqSdfTXT4lo6w,8397 +netifaces-0.10.5.dist-info/METADATA,sha256=dFRf6XQxPnSfFj-Byp7YcB9i8ajFwp-RMIDdpgTVWVE,9130 +netifaces-0.10.5.dist-info/RECORD,, +netifaces-0.10.5.dist-info/WHEEL,sha256=qlXKyUxaKdKGVZKHdQaMFOPNWg5qC8OVyb-aicJwJ6U,104 +netifaces-0.10.5.dist-info/metadata.json,sha256=skM3ng45s5k2xxXi2JurkARd2uf_wKGTul08T1_36BI,846 +netifaces-0.10.5.dist-info/top_level.txt,sha256=PqMTaIuWtSjkdQHX6lH1Lmpv2aqBUYAGqATB8z3A6TQ,10 +netifaces-0.10.5.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +netifaces-0.10.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 diff --git a/deps/netifaces-0.10.5.dist-info/WHEEL b/deps/netifaces-0.10.5.dist-info/WHEEL new file mode 100644 index 00000000..982a5e31 --- /dev/null +++ b/deps/netifaces-0.10.5.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp34-cp34m-linux_armv7l + diff --git a/deps/netifaces-0.10.5.dist-info/metadata.json b/deps/netifaces-0.10.5.dist-info/metadata.json new file mode 100644 index 00000000..793cb237 --- /dev/null +++ b/deps/netifaces-0.10.5.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: System :: Networking", "Programming Language :: Python", "Programming 
Language :: Python :: 2", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "alastair@alastairs-place.net", "name": "Alastair Houghton", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://bitbucket.org/al45tair/netifaces"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT License", "metadata_version": "2.0", "name": "netifaces", "summary": "Portable network interface information.", "version": "0.10.5"} \ No newline at end of file diff --git a/deps/netifaces-0.10.5.dist-info/top_level.txt b/deps/netifaces-0.10.5.dist-info/top_level.txt new file mode 100644 index 00000000..3f008fd6 --- /dev/null +++ b/deps/netifaces-0.10.5.dist-info/top_level.txt @@ -0,0 +1 @@ +netifaces diff --git a/deps/netifaces-0.10.5.dist-info/zip-safe b/deps/netifaces-0.10.5.dist-info/zip-safe new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/deps/netifaces-0.10.5.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/deps/netifaces.cpython-34m.so b/deps/netifaces.cpython-34m.so new file mode 100755 index 00000000..6d35b8c1 Binary files /dev/null and b/deps/netifaces.cpython-34m.so differ diff --git a/deps/nmap/__init__.py b/deps/nmap/__init__.py new file mode 100644 index 00000000..fb7a6c5b --- /dev/null +++ b/deps/nmap/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: latin-1 -*- + +""" +python-nmap - 2010.12.17 + +python-nmap is a python library which helps in using nmap port scanner. +It allows to easilly manipulate nmap scan results and will be a perfect +tool for systems administrators who want to automatize scanning task +and reports. It also supports nmap script outputs. 
+ + +Author : + +* Alexandre Norman - norman@xael.org + +Contributors: + +* Steve 'Ashcrow' Milner - steve@gnulinux.net +* Brian Bustin - brian at bustin.us +* old.schepperhand +* Johan Lundberg +* Thomas D. maaaaz + +Licence : GPL v3 or any later version + + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program. If not, see . +""" + +from .nmap import * # noqa +from .nmap import __author__ # noqa +from .nmap import __version__ # noqa +from .nmap import __last_modification__ # noqa diff --git a/deps/nmap/__pycache__/__init__.cpython-34.pyc b/deps/nmap/__pycache__/__init__.cpython-34.pyc new file mode 100644 index 00000000..ba1d8433 Binary files /dev/null and b/deps/nmap/__pycache__/__init__.cpython-34.pyc differ diff --git a/deps/nmap/__pycache__/nmap.cpython-34.pyc b/deps/nmap/__pycache__/nmap.cpython-34.pyc new file mode 100644 index 00000000..18fba348 Binary files /dev/null and b/deps/nmap/__pycache__/nmap.cpython-34.pyc differ diff --git a/deps/nmap/__pycache__/test.cpython-34.pyc b/deps/nmap/__pycache__/test.cpython-34.pyc new file mode 100644 index 00000000..cd26ad2a Binary files /dev/null and b/deps/nmap/__pycache__/test.cpython-34.pyc differ diff --git a/deps/nmap/__pycache__/test_nmap.cpython-34.pyc b/deps/nmap/__pycache__/test_nmap.cpython-34.pyc new file mode 100644 index 00000000..13751feb Binary files /dev/null and b/deps/nmap/__pycache__/test_nmap.cpython-34.pyc differ diff --git a/deps/nmap/nmap.py b/deps/nmap/nmap.py new file mode 100644 index 
00000000..423daa2f --- /dev/null +++ b/deps/nmap/nmap.py @@ -0,0 +1,1148 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +nmap.py - version and date, see below + +Source code : https://bitbucket.org/xael/python-nmap + +Author : + +* Alexandre Norman - norman at xael.org + +Contributors: + +* Steve 'Ashcrow' Milner - steve at gnulinux.net +* Brian Bustin - brian at bustin.us +* old.schepperhand +* Johan Lundberg +* Thomas D. maaaaz +* Robert Bost +* David Peltier + +Licence: GPL v3 or any later version for python-nmap + + +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program. If not, see . 
+ + +************** +IMPORTANT NOTE +************** + +The Nmap Security Scanner used by python-nmap is distributed +under it's own licence that you can find at https://svn.nmap.org/nmap/COPYING + +Any redistribution of python-nmap along with the Nmap Security Scanner +must conform to the Nmap Security Scanner licence + +""" + + +__author__ = 'Alexandre Norman (norman@xael.org)' +__version__ = '0.6.1' +__last_modification__ = '2016.07.29' + + +import csv +import io +import os +import re +import shlex +import subprocess +import sys +from xml.etree import ElementTree as ET + +try: + from multiprocessing import Process +except ImportError: + # For pre 2.6 releases + from threading import Thread as Process + +############################################################################ + + +class PortScanner(object): + """ + PortScanner class allows to use nmap from python + + """ + + def __init__(self, nmap_search_path=('nmap', '/usr/bin/nmap', '/usr/local/bin/nmap', '/sw/bin/nmap', '/opt/local/bin/nmap')): + """ + Initialize PortScanner module + + * detects nmap on the system and nmap version + * may raise PortScannerError exception if nmap is not found in the path + + :param nmap_search_path: tupple of string where to search for nmap executable. Change this if you want to use a specific version of nmap. + :returns: nothing + + """ + self._nmap_path = '' # nmap path + self._scan_result = {} + self._nmap_version_number = 0 # nmap version number + self._nmap_subversion_number = 0 # nmap subversion number + self._nmap_last_output = '' # last full ascii nmap output + is_nmap_found = False # true if we have found nmap + + self.__process = None + + # regex used to detect nmap (http or https) + regex = re.compile( + 'Nmap version [0-9]*\.[0-9]*[^ ]* \( http(|s)://.* \)' + ) + # launch 'nmap -V', we wait after + #'Nmap version 5.0 ( http://nmap.org )' + # This is for Mac OSX. 
When idle3 is launched from the finder, PATH is not set so nmap was not found + for nmap_path in nmap_search_path: + try: + if sys.platform.startswith('freebsd') \ + or sys.platform.startswith('linux') \ + or sys.platform.startswith('darwin'): + p = subprocess.Popen([nmap_path, '-V'], + bufsize=10000, + stdout=subprocess.PIPE, + close_fds=True) + else: + p = subprocess.Popen([nmap_path, '-V'], + bufsize=10000, + stdout=subprocess.PIPE) + + except OSError: + pass + else: + self._nmap_path = nmap_path # save path + break + else: + raise PortScannerError( + 'nmap program was not found in path. PATH is : {0}'.format( + os.getenv('PATH') + ) + ) + + self._nmap_last_output = bytes.decode(p.communicate()[0]) # sav stdout + for line in self._nmap_last_output.split(os.linesep): + if regex.match(line) is not None: + is_nmap_found = True + # Search for version number + regex_version = re.compile('[0-9]+') + regex_subversion = re.compile('\.[0-9]+') + + rv = regex_version.search(line) + rsv = regex_subversion.search(line) + + if rv is not None and rsv is not None: + # extract version/subversion + self._nmap_version_number = int(line[rv.start():rv.end()]) + self._nmap_subversion_number = int( + line[rsv.start()+1:rsv.end()] + ) + break + + if not is_nmap_found: + raise PortScannerError('nmap program was not found in path') + + return + + def get_nmap_last_output(self): + """ + Returns the last text output of nmap in raw text + this may be used for debugging purpose + + :returns: string containing the last text output of nmap in raw text + """ + return self._nmap_last_output + + def nmap_version(self): + """ + returns nmap version if detected (int version, int subversion) + or (0, 0) if unknown + :returns: (nmap_version_number, nmap_subversion_number) + """ + return (self._nmap_version_number, self._nmap_subversion_number) + + def listscan(self, hosts='127.0.0.1'): + """ + do not scan but interpret target hosts and return a list a hosts + """ + assert type(hosts) is str, 'Wrong 
type for [hosts], should be a string [was {0}]'.format(type(hosts)) # noqa + output = self.scan(hosts, arguments='-sL') + # Test if host was IPV6 + try: + if 'looks like an IPv6 target specification' in output['nmap']['scaninfo']['error'][0]: # noqa + self.scan(hosts, arguments='-sL -6') + except KeyError: + pass + + return self.all_hosts() + + def scan(self, hosts='127.0.0.1', ports=None, arguments='-sV', sudo=False): + """ + Scan given hosts + + May raise PortScannerError exception if nmap output was not xml + + Test existance of the following key to know + if something went wrong : ['nmap']['scaninfo']['error'] + If not present, everything was ok. + + :param hosts: string for hosts as nmap use it 'scanme.nmap.org' or '198.116.0-255.1-127' or '216.163.128.20/20' + :param ports: string for ports as nmap use it '22,53,110,143-4564' + :param arguments: string of arguments for nmap '-sU -sX -sC' + :param sudo: launch nmap with sudo if True + + :returns: scan_result as dictionnary + """ + if sys.version_info[0]==2: + assert type(hosts) in (str, unicode), 'Wrong type for [hosts], should be a string [was {0}]'.format(type(hosts)) # noqa + assert type(ports) in (str, unicode, type(None)), 'Wrong type for [ports], should be a string [was {0}]'.format(type(ports)) # noqa + assert type(arguments) in (str, unicode), 'Wrong type for [arguments], should be a string [was {0}]'.format(type(arguments)) # noqa + else: + assert type(hosts) is str, 'Wrong type for [hosts], should be a string [was {0}]'.format(type(hosts)) # noqa + assert type(ports) in (str, type(None)), 'Wrong type for [ports], should be a string [was {0}]'.format(type(ports)) # noqa + assert type(arguments) is str, 'Wrong type for [arguments], should be a string [was {0}]'.format(type(arguments)) # noqa + + for redirecting_output in ['-oX', '-oA']: + assert redirecting_output not in arguments, 'Xml output can\'t be redirected from command line.\nYou can access it after a scan 
using:\nnmap.nm.get_nmap_last_output()' # noqa + + h_args = shlex.split(hosts) + f_args = shlex.split(arguments) + + # Launch scan + args = [self._nmap_path, '-oX', '-'] + h_args + ['-p', ports]*(ports is not None) + f_args + if sudo: + args = ['sudo'] + args + + p = subprocess.Popen(args, bufsize=100000, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + # wait until finished + # get output + (self._nmap_last_output, nmap_err) = p.communicate() + self._nmap_last_output = bytes.decode(self._nmap_last_output) + nmap_err = bytes.decode(nmap_err) + + # If there was something on stderr, there was a problem so abort... in + # fact not always. As stated by AlenLPeacock : + # This actually makes python-nmap mostly unusable on most real-life + # networks -- a particular subnet might have dozens of scannable hosts, + # but if a single one is unreachable or unroutable during the scan, + # nmap.scan() returns nothing. This behavior also diverges significantly + # from commandline nmap, which simply stderrs individual problems but + # keeps on trucking. 
+ + nmap_err_keep_trace = [] + nmap_warn_keep_trace = [] + if len(nmap_err) > 0: + regex_warning = re.compile('^Warning: .*', re.IGNORECASE) + for line in nmap_err.split(os.linesep): + if len(line) > 0: + rgw = regex_warning.search(line) + if rgw is not None: + # sys.stderr.write(line+os.linesep) + nmap_warn_keep_trace.append(line+os.linesep) + else: + # raise PortScannerError(nmap_err) + nmap_err_keep_trace.append(nmap_err) + + return self.analyse_nmap_xml_scan( + nmap_xml_output=self._nmap_last_output, + nmap_err=nmap_err, + nmap_err_keep_trace=nmap_err_keep_trace, + nmap_warn_keep_trace=nmap_warn_keep_trace + ) + + + def analyse_nmap_xml_scan(self, nmap_xml_output=None, nmap_err='', nmap_err_keep_trace='', nmap_warn_keep_trace=''): + """ + Analyses NMAP xml scan ouput + + May raise PortScannerError exception if nmap output was not xml + + Test existance of the following key to know if something went wrong : ['nmap']['scaninfo']['error'] + If not present, everything was ok. + + :param nmap_xml_output: xml string to analyse + :returns: scan_result as dictionnary + """ + + # nmap xml output looks like : + # + # + #
+ # + # + # + # + # + # + # + # + # + # + # + # + # + + + + +
+''' +FOOTER = u'''\ + +
+ +
+
+

Console Locked

+

+ The console is locked and needs to be unlocked by entering the PIN. + You can find the PIN printed out on the standard output of your + shell that runs the server. +

+

PIN: + + +

+
+
+ + +''' + +PAGE_HTML = HEADER + u'''\ +

%(exception_type)s

+
+

%(exception)s

+
+

Traceback (most recent call last)

+%(summary)s +
+
+

+ + This is the Copy/Paste friendly version of the traceback. You can also paste this traceback into + a gist: + +

+ +
+
+
+ The debugger caught an exception in your WSGI application. You can now + look at the traceback which led to the error. + If you enable JavaScript you can also use additional features such as code + execution (if the evalex feature is enabled), automatic pasting of the + exceptions and much more. +
+''' + FOOTER + ''' + +''' + +CONSOLE_HTML = HEADER + u'''\ +

Interactive Console

+
+In this console you can execute Python expressions in the context of the +application. The initial namespace was created by the debugger automatically. +
+
The Console requires JavaScript.
+''' + FOOTER + +SUMMARY_HTML = u'''\ +
+ %(title)s +
    %(frames)s
+ %(description)s +
+''' + +FRAME_HTML = u'''\ +
+

File "%(filename)s", + line %(lineno)s, + in %(function_name)s

+
%(lines)s
+
+''' + +SOURCE_LINE_HTML = u'''\ + + %(lineno)s + %(code)s + +''' + + +def render_console_html(secret, evalex_trusted=True): + return CONSOLE_HTML % { + 'evalex': 'true', + 'evalex_trusted': evalex_trusted and 'true' or 'false', + 'console': 'true', + 'title': 'Console', + 'secret': secret, + 'traceback_id': -1 + } + + +def get_current_traceback(ignore_system_exceptions=False, + show_hidden_frames=False, skip=0): + """Get the current exception info as `Traceback` object. Per default + calling this method will reraise system exceptions such as generator exit, + system exit or others. This behavior can be disabled by passing `False` + to the function as first parameter. + """ + exc_type, exc_value, tb = sys.exc_info() + if ignore_system_exceptions and exc_type in system_exceptions: + raise + for x in range_type(skip): + if tb.tb_next is None: + break + tb = tb.tb_next + tb = Traceback(exc_type, exc_value, tb) + if not show_hidden_frames: + tb.filter_hidden_frames() + return tb + + +class Line(object): + """Helper for the source renderer.""" + __slots__ = ('lineno', 'code', 'in_frame', 'current') + + def __init__(self, lineno, code): + self.lineno = lineno + self.code = code + self.in_frame = False + self.current = False + + def classes(self): + rv = ['line'] + if self.in_frame: + rv.append('in-frame') + if self.current: + rv.append('current') + return rv + classes = property(classes) + + def render(self): + return SOURCE_LINE_HTML % { + 'classes': u' '.join(self.classes), + 'lineno': self.lineno, + 'code': escape(self.code) + } + + +class Traceback(object): + """Wraps a traceback.""" + + def __init__(self, exc_type, exc_value, tb): + self.exc_type = exc_type + self.exc_value = exc_value + if not isinstance(exc_type, str): + exception_type = exc_type.__name__ + if exc_type.__module__ not in ('__builtin__', 'exceptions'): + exception_type = exc_type.__module__ + '.' 
+ exception_type + else: + exception_type = exc_type + self.exception_type = exception_type + + # we only add frames to the list that are not hidden. This follows + # the the magic variables as defined by paste.exceptions.collector + self.frames = [] + while tb: + self.frames.append(Frame(exc_type, exc_value, tb)) + tb = tb.tb_next + + def filter_hidden_frames(self): + """Remove the frames according to the paste spec.""" + if not self.frames: + return + + new_frames = [] + hidden = False + for frame in self.frames: + hide = frame.hide + if hide in ('before', 'before_and_this'): + new_frames = [] + hidden = False + if hide == 'before_and_this': + continue + elif hide in ('reset', 'reset_and_this'): + hidden = False + if hide == 'reset_and_this': + continue + elif hide in ('after', 'after_and_this'): + hidden = True + if hide == 'after_and_this': + continue + elif hide or hidden: + continue + new_frames.append(frame) + + # if we only have one frame and that frame is from the codeop + # module, remove it. 
+ if len(new_frames) == 1 and self.frames[0].module == 'codeop': + del self.frames[:] + + # if the last frame is missing something went terrible wrong :( + elif self.frames[-1] in new_frames: + self.frames[:] = new_frames + + def is_syntax_error(self): + """Is it a syntax error?""" + return isinstance(self.exc_value, SyntaxError) + is_syntax_error = property(is_syntax_error) + + def exception(self): + """String representation of the exception.""" + buf = traceback.format_exception_only(self.exc_type, self.exc_value) + rv = ''.join(buf).strip() + return rv.decode('utf-8', 'replace') if PY2 else rv + exception = property(exception) + + def log(self, logfile=None): + """Log the ASCII traceback into a file object.""" + if logfile is None: + logfile = sys.stderr + tb = self.plaintext.rstrip() + u'\n' + if PY2: + tb = tb.encode('utf-8', 'replace') + logfile.write(tb) + + def paste(self): + """Create a paste and return the paste id.""" + data = json.dumps({ + 'description': 'Werkzeug Internal Server Error', + 'public': False, + 'files': { + 'traceback.txt': { + 'content': self.plaintext + } + } + }).encode('utf-8') + try: + from urllib2 import urlopen + except ImportError: + from urllib.request import urlopen + rv = urlopen('https://api.github.com/gists', data=data) + resp = json.loads(rv.read().decode('utf-8')) + rv.close() + return { + 'url': resp['html_url'], + 'id': resp['id'] + } + + def render_summary(self, include_title=True): + """Render the traceback for the interactive console.""" + title = '' + frames = [] + classes = ['traceback'] + if not self.frames: + classes.append('noframe-traceback') + + if include_title: + if self.is_syntax_error: + title = u'Syntax Error' + else: + title = u'Traceback (most recent call last):' + + for frame in self.frames: + frames.append(u'%s' % ( + frame.info and u' title="%s"' % escape(frame.info) or u'', + frame.render() + )) + + if self.is_syntax_error: + description_wrapper = u'
%s
' + else: + description_wrapper = u'
%s
' + + return SUMMARY_HTML % { + 'classes': u' '.join(classes), + 'title': title and u'

%s

' % title or u'', + 'frames': u'\n'.join(frames), + 'description': description_wrapper % escape(self.exception) + } + + def render_full(self, evalex=False, secret=None, + evalex_trusted=True): + """Render the Full HTML page with the traceback info.""" + exc = escape(self.exception) + return PAGE_HTML % { + 'evalex': evalex and 'true' or 'false', + 'evalex_trusted': evalex_trusted and 'true' or 'false', + 'console': 'false', + 'title': exc, + 'exception': exc, + 'exception_type': escape(self.exception_type), + 'summary': self.render_summary(include_title=False), + 'plaintext': escape(self.plaintext), + 'plaintext_cs': re.sub('-{2,}', '-', self.plaintext), + 'traceback_id': self.id, + 'secret': secret + } + + def generate_plaintext_traceback(self): + """Like the plaintext attribute but returns a generator""" + yield u'Traceback (most recent call last):' + for frame in self.frames: + yield u' File "%s", line %s, in %s' % ( + frame.filename, + frame.lineno, + frame.function_name + ) + yield u' ' + frame.current_line.strip() + yield self.exception + + def plaintext(self): + return u'\n'.join(self.generate_plaintext_traceback()) + plaintext = cached_property(plaintext) + + id = property(lambda x: id(x)) + + +class Frame(object): + + """A single frame in a traceback.""" + + def __init__(self, exc_type, exc_value, tb): + self.lineno = tb.tb_lineno + self.function_name = tb.tb_frame.f_code.co_name + self.locals = tb.tb_frame.f_locals + self.globals = tb.tb_frame.f_globals + + fn = inspect.getsourcefile(tb) or inspect.getfile(tb) + if fn[-4:] in ('.pyo', '.pyc'): + fn = fn[:-1] + # if it's a file on the file system resolve the real filename. 
+ if os.path.isfile(fn): + fn = os.path.realpath(fn) + self.filename = to_unicode(fn, get_filesystem_encoding()) + self.module = self.globals.get('__name__') + self.loader = self.globals.get('__loader__') + self.code = tb.tb_frame.f_code + + # support for paste's traceback extensions + self.hide = self.locals.get('__traceback_hide__', False) + info = self.locals.get('__traceback_info__') + if info is not None: + try: + info = text_type(info) + except UnicodeError: + info = str(info).decode('utf-8', 'replace') + self.info = info + + def render(self): + """Render a single frame in a traceback.""" + return FRAME_HTML % { + 'id': self.id, + 'filename': escape(self.filename), + 'lineno': self.lineno, + 'function_name': escape(self.function_name), + 'lines': self.render_line_context(), + } + + def render_line_context(self): + before, current, after = self.get_context_lines() + rv = [] + + def render_line(line, cls): + line = line.expandtabs().rstrip() + stripped_line = line.strip() + prefix = len(line) - len(stripped_line) + rv.append( + '
%s%s
' % ( + cls, ' ' * prefix, escape(stripped_line) or ' ')) + + for line in before: + render_line(line, 'before') + render_line(current, 'current') + for line in after: + render_line(line, 'after') + + return '\n'.join(rv) + + def get_annotated_lines(self): + """Helper function that returns lines with extra information.""" + lines = [Line(idx + 1, x) for idx, x in enumerate(self.sourcelines)] + + # find function definition and mark lines + if hasattr(self.code, 'co_firstlineno'): + lineno = self.code.co_firstlineno - 1 + while lineno > 0: + if _funcdef_re.match(lines[lineno].code): + break + lineno -= 1 + try: + offset = len(inspect.getblock([x.code + '\n' for x + in lines[lineno:]])) + except TokenError: + offset = 0 + for line in lines[lineno:lineno + offset]: + line.in_frame = True + + # mark current line + try: + lines[self.lineno - 1].current = True + except IndexError: + pass + + return lines + + def eval(self, code, mode='single'): + """Evaluate code in the context of the frame.""" + if isinstance(code, string_types): + if PY2 and isinstance(code, unicode): # noqa + code = UTF8_COOKIE + code.encode('utf-8') + code = compile(code, '', mode) + return eval(code, self.globals, self.locals) + + @cached_property + def sourcelines(self): + """The sourcecode of the file as list of unicode strings.""" + # get sourcecode from loader or file + source = None + if self.loader is not None: + try: + if hasattr(self.loader, 'get_source'): + source = self.loader.get_source(self.module) + elif hasattr(self.loader, 'get_source_by_code'): + source = self.loader.get_source_by_code(self.code) + except Exception: + # we munch the exception so that we don't cause troubles + # if the loader is broken. + pass + + if source is None: + try: + f = open(to_native(self.filename, get_filesystem_encoding()), + mode='rb') + except IOError: + return [] + try: + source = f.read() + finally: + f.close() + + # already unicode? 
return right away + if isinstance(source, text_type): + return source.splitlines() + + # yes. it should be ascii, but we don't want to reject too many + # characters in the debugger if something breaks + charset = 'utf-8' + if source.startswith(UTF8_COOKIE): + source = source[3:] + else: + for idx, match in enumerate(_line_re.finditer(source)): + match = _coding_re.search(match.group()) + if match is not None: + charset = match.group(1) + break + if idx > 1: + break + + # on broken cookies we fall back to utf-8 too + charset = to_native(charset) + try: + codecs.lookup(charset) + except LookupError: + charset = 'utf-8' + + return source.decode(charset, 'replace').splitlines() + + def get_context_lines(self, context=5): + before = self.sourcelines[self.lineno - context - 1:self.lineno - 1] + past = self.sourcelines[self.lineno:self.lineno + context] + return ( + before, + self.current_line, + past, + ) + + @property + def current_line(self): + try: + return self.sourcelines[self.lineno - 1] + except IndexError: + return u'' + + @cached_property + def console(self): + return Console(self.globals, self.locals) + + id = property(lambda x: id(x)) diff --git a/deps/werkzeug/exceptions.py b/deps/werkzeug/exceptions.py new file mode 100644 index 00000000..392e6759 --- /dev/null +++ b/deps/werkzeug/exceptions.py @@ -0,0 +1,658 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.exceptions + ~~~~~~~~~~~~~~~~~~~ + + This module implements a number of Python exceptions you can raise from + within your views to trigger a standard non-200 response. 
+ + + Usage Example + ------------- + + :: + + from werkzeug.wrappers import BaseRequest + from werkzeug.wsgi import responder + from werkzeug.exceptions import HTTPException, NotFound + + def view(request): + raise NotFound() + + @responder + def application(environ, start_response): + request = BaseRequest(environ) + try: + return view(request) + except HTTPException as e: + return e + + + As you can see from this example those exceptions are callable WSGI + applications. Because of Python 2.4 compatibility those do not extend + from the response objects but only from the python exception class. + + As a matter of fact they are not Werkzeug response objects. However you + can get a response object by calling ``get_response()`` on a HTTP + exception. + + Keep in mind that you have to pass an environment to ``get_response()`` + because some errors fetch additional information from the WSGI + environment. + + If you want to hook in a different exception page to say, a 404 status + code, you can add a second except for a specific subclass of an error:: + + @responder + def application(environ, start_response): + request = BaseRequest(environ) + try: + return view(request) + except NotFound, e: + return not_found(request) + except HTTPException, e: + return e + + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import sys + +# Because of bootstrapping reasons we need to manually patch ourselves +# onto our parent module. +import werkzeug +werkzeug.exceptions = sys.modules[__name__] + +from werkzeug._internal import _get_environ +from werkzeug._compat import iteritems, integer_types, text_type, \ + implements_to_string + +from werkzeug.wrappers import Response + + +@implements_to_string +class HTTPException(Exception): + + """ + Baseclass for all HTTP exceptions. 
This exception can be called as WSGI + application to render a default error page or you can catch the subclasses + of it independently and render nicer error messages. + """ + + code = None + description = None + + def __init__(self, description=None, response=None): + Exception.__init__(self) + if description is not None: + self.description = description + self.response = response + + @classmethod + def wrap(cls, exception, name=None): + """This method returns a new subclass of the exception provided that + also is a subclass of `BadRequest`. + """ + class newcls(cls, exception): + + def __init__(self, arg=None, *args, **kwargs): + cls.__init__(self, *args, **kwargs) + exception.__init__(self, arg) + newcls.__module__ = sys._getframe(1).f_globals.get('__name__') + newcls.__name__ = name or cls.__name__ + exception.__name__ + return newcls + + @property + def name(self): + """The status name.""" + return HTTP_STATUS_CODES.get(self.code, 'Unknown Error') + + def get_description(self, environ=None): + """Get the description.""" + return u'

%s

' % escape(self.description) + + def get_body(self, environ=None): + """Get the HTML body.""" + return text_type(( + u'\n' + u'%(code)s %(name)s\n' + u'

%(name)s

\n' + u'%(description)s\n' + ) % { + 'code': self.code, + 'name': escape(self.name), + 'description': self.get_description(environ) + }) + + def get_headers(self, environ=None): + """Get a list of headers.""" + return [('Content-Type', 'text/html')] + + def get_response(self, environ=None): + """Get a response object. If one was passed to the exception + it's returned directly. + + :param environ: the optional environ for the request. This + can be used to modify the response depending + on how the request looked like. + :return: a :class:`Response` object or a subclass thereof. + """ + if self.response is not None: + return self.response + if environ is not None: + environ = _get_environ(environ) + headers = self.get_headers(environ) + return Response(self.get_body(environ), self.code, headers) + + def __call__(self, environ, start_response): + """Call the exception as WSGI application. + + :param environ: the WSGI environment. + :param start_response: the response callable provided by the WSGI + server. + """ + response = self.get_response(environ) + return response(environ, start_response) + + def __str__(self): + return '%d: %s' % (self.code, self.name) + + def __repr__(self): + return '<%s \'%s\'>' % (self.__class__.__name__, self) + + +class BadRequest(HTTPException): + + """*400* `Bad Request` + + Raise if the browser sends something to the application the application + or server cannot handle. + """ + code = 400 + description = ( + 'The browser (or proxy) sent a request that this server could ' + 'not understand.' + ) + + +class ClientDisconnected(BadRequest): + + """Internal exception that is raised if Werkzeug detects a disconnected + client. Since the client is already gone at that point attempting to + send the error message to the client might not work and might ultimately + result in another exception in the server. Mainly this is here so that + it is silenced by default as far as Werkzeug is concerned. 
+ + Since disconnections cannot be reliably detected and are unspecified + by WSGI to a large extent this might or might not be raised if a client + is gone. + + .. versionadded:: 0.8 + """ + + +class SecurityError(BadRequest): + + """Raised if something triggers a security error. This is otherwise + exactly like a bad request error. + + .. versionadded:: 0.9 + """ + + +class BadHost(BadRequest): + + """Raised if the submitted host is badly formatted. + + .. versionadded:: 0.11.2 + """ + + +class Unauthorized(HTTPException): + + """*401* `Unauthorized` + + Raise if the user is not authorized. Also used if you want to use HTTP + basic auth. + """ + code = 401 + description = ( + 'The server could not verify that you are authorized to access ' + 'the URL requested. You either supplied the wrong credentials (e.g. ' + 'a bad password), or your browser doesn\'t understand how to supply ' + 'the credentials required.' + ) + + +class Forbidden(HTTPException): + + """*403* `Forbidden` + + Raise if the user doesn't have the permission for the requested resource + but was authenticated. + """ + code = 403 + description = ( + 'You don\'t have the permission to access the requested resource. ' + 'It is either read-protected or not readable by the server.' + ) + + +class NotFound(HTTPException): + + """*404* `Not Found` + + Raise if a resource does not exist and never existed. + """ + code = 404 + description = ( + 'The requested URL was not found on the server. ' + 'If you entered the URL manually please check your spelling and ' + 'try again.' + ) + + +class MethodNotAllowed(HTTPException): + + """*405* `Method Not Allowed` + + Raise if the server used a method the resource does not handle. For + example `POST` if the resource is view only. Especially useful for REST. + + The first argument for this exception should be a list of allowed methods. 
+ Strictly speaking the response would be invalid if you don't provide valid + methods in the header which you can do with that list. + """ + code = 405 + description = 'The method is not allowed for the requested URL.' + + def __init__(self, valid_methods=None, description=None): + """Takes an optional list of valid http methods + starting with werkzeug 0.3 the list will be mandatory.""" + HTTPException.__init__(self, description) + self.valid_methods = valid_methods + + def get_headers(self, environ): + headers = HTTPException.get_headers(self, environ) + if self.valid_methods: + headers.append(('Allow', ', '.join(self.valid_methods))) + return headers + + +class NotAcceptable(HTTPException): + + """*406* `Not Acceptable` + + Raise if the server can't return any content conforming to the + `Accept` headers of the client. + """ + code = 406 + + description = ( + 'The resource identified by the request is only capable of ' + 'generating response entities which have content characteristics ' + 'not acceptable according to the accept headers sent in the ' + 'request.' + ) + + +class RequestTimeout(HTTPException): + + """*408* `Request Timeout` + + Raise to signalize a timeout. + """ + code = 408 + description = ( + 'The server closed the network connection because the browser ' + 'didn\'t finish the request within the specified time.' + ) + + +class Conflict(HTTPException): + + """*409* `Conflict` + + Raise to signal that a request cannot be completed because it conflicts + with the current state on the server. + + .. versionadded:: 0.7 + """ + code = 409 + description = ( + 'A conflict happened while processing the request. The resource ' + 'might have been modified while the request was being processed.' + ) + + +class Gone(HTTPException): + + """*410* `Gone` + + Raise if a resource existed previously and went away without new location. 
+ """ + code = 410 + description = ( + 'The requested URL is no longer available on this server and there ' + 'is no forwarding address. If you followed a link from a foreign ' + 'page, please contact the author of this page.' + ) + + +class LengthRequired(HTTPException): + + """*411* `Length Required` + + Raise if the browser submitted data but no ``Content-Length`` header which + is required for the kind of processing the server does. + """ + code = 411 + description = ( + 'A request with this method requires a valid Content-' + 'Length header.' + ) + + +class PreconditionFailed(HTTPException): + + """*412* `Precondition Failed` + + Status code used in combination with ``If-Match``, ``If-None-Match``, or + ``If-Unmodified-Since``. + """ + code = 412 + description = ( + 'The precondition on the request for the URL failed positive ' + 'evaluation.' + ) + + +class RequestEntityTooLarge(HTTPException): + + """*413* `Request Entity Too Large` + + The status code one should return if the data submitted exceeded a given + limit. + """ + code = 413 + description = ( + 'The data value transmitted exceeds the capacity limit.' + ) + + +class RequestURITooLarge(HTTPException): + + """*414* `Request URI Too Large` + + Like *413* but for too long URLs. + """ + code = 414 + description = ( + 'The length of the requested URL exceeds the capacity limit ' + 'for this server. The request cannot be processed.' + ) + + +class UnsupportedMediaType(HTTPException): + + """*415* `Unsupported Media Type` + + The status code returned if the server is unable to handle the media type + the client transmitted. + """ + code = 415 + description = ( + 'The server does not support the media type transmitted in ' + 'the request.' + ) + + +class RequestedRangeNotSatisfiable(HTTPException): + + """*416* `Requested Range Not Satisfiable` + + The client asked for a part of the file that lies beyond the end + of the file. + + .. 
versionadded:: 0.7 + """ + code = 416 + description = ( + 'The server cannot provide the requested range.' + ) + + +class ExpectationFailed(HTTPException): + + """*417* `Expectation Failed` + + The server cannot meet the requirements of the Expect request-header. + + .. versionadded:: 0.7 + """ + code = 417 + description = ( + 'The server could not meet the requirements of the Expect header' + ) + + +class ImATeapot(HTTPException): + + """*418* `I'm a teapot` + + The server should return this if it is a teapot and someone attempted + to brew coffee with it. + + .. versionadded:: 0.7 + """ + code = 418 + description = ( + 'This server is a teapot, not a coffee machine' + ) + + +class UnprocessableEntity(HTTPException): + + """*422* `Unprocessable Entity` + + Used if the request is well formed, but the instructions are otherwise + incorrect. + """ + code = 422 + description = ( + 'The request was well-formed but was unable to be followed ' + 'due to semantic errors.' + ) + + +class PreconditionRequired(HTTPException): + + """*428* `Precondition Required` + + The server requires this request to be conditional, typically to prevent + the lost update problem, which is a race condition between two or more + clients attempting to update a resource through PUT or DELETE. By requiring + each client to include a conditional header ("If-Match" or "If-Unmodified- + Since") with the proper value retained from a recent GET request, the + server ensures that each client has at least seen the previous revision of + the resource. + """ + code = 428 + description = ( + 'This request is required to be conditional; try using "If-Match" ' + 'or "If-Unmodified-Since".' + ) + + +class TooManyRequests(HTTPException): + + """*429* `Too Many Requests` + + The server is limiting the rate at which this user receives responses, and + this request exceeds that rate. (The server may use any convenient method + to identify users and their request rates). 
The server may include a + "Retry-After" header to indicate how long the user should wait before + retrying. + """ + code = 429 + description = ( + 'This user has exceeded an allotted request count. Try again later.' + ) + + +class RequestHeaderFieldsTooLarge(HTTPException): + + """*431* `Request Header Fields Too Large` + + The server refuses to process the request because the header fields are too + large. One or more individual fields may be too large, or the set of all + headers is too large. + """ + code = 431 + description = ( + 'One or more header fields exceeds the maximum size.' + ) + + +class InternalServerError(HTTPException): + + """*500* `Internal Server Error` + + Raise if an internal server error occurred. This is a good fallback if an + unknown error occurred in the dispatcher. + """ + code = 500 + description = ( + 'The server encountered an internal error and was unable to ' + 'complete your request. Either the server is overloaded or there ' + 'is an error in the application.' + ) + + +class NotImplemented(HTTPException): + + """*501* `Not Implemented` + + Raise if the application does not support the action requested by the + browser. + """ + code = 501 + description = ( + 'The server does not support the action requested by the ' + 'browser.' + ) + + +class BadGateway(HTTPException): + + """*502* `Bad Gateway` + + If you do proxying in your application you should return this status code + if you received an invalid response from the upstream server it accessed + in attempting to fulfill the request. + """ + code = 502 + description = ( + 'The proxy server received an invalid response from an upstream ' + 'server.' + ) + + +class ServiceUnavailable(HTTPException): + + """*503* `Service Unavailable` + + Status code you should return if a service is temporarily unavailable. + """ + code = 503 + description = ( + 'The server is temporarily unable to service your request due to ' + 'maintenance downtime or capacity problems. 
Please try again ' + 'later.' + ) + + +class GatewayTimeout(HTTPException): + + """*504* `Gateway Timeout` + + Status code you should return if a connection to an upstream server + times out. + """ + code = 504 + description = ( + 'The connection to an upstream server timed out.' + ) + + +class HTTPVersionNotSupported(HTTPException): + + """*505* `HTTP Version Not Supported` + + The server does not support the HTTP protocol version used in the request. + """ + code = 505 + description = ( + 'The server does not support the HTTP protocol version used in the ' + 'request.' + ) + + +default_exceptions = {} +__all__ = ['HTTPException'] + + +def _find_exceptions(): + for name, obj in iteritems(globals()): + try: + is_http_exception = issubclass(obj, HTTPException) + except TypeError: + is_http_exception = False + if not is_http_exception or obj.code is None: + continue + __all__.append(obj.__name__) + old_obj = default_exceptions.get(obj.code, None) + if old_obj is not None and issubclass(obj, old_obj): + continue + default_exceptions[obj.code] = obj +_find_exceptions() +del _find_exceptions + + +class Aborter(object): + + """ + When passed a dict of code -> exception items it can be used as + callable that raises exceptions. If the first argument to the + callable is an integer it will be looked up in the mapping, if it's + a WSGI application it will be raised in a proxy exception. + + The rest of the arguments are forwarded to the exception constructor. 
+ """ + + def __init__(self, mapping=None, extra=None): + if mapping is None: + mapping = default_exceptions + self.mapping = dict(mapping) + if extra is not None: + self.mapping.update(extra) + + def __call__(self, code, *args, **kwargs): + if not args and not kwargs and not isinstance(code, integer_types): + raise HTTPException(response=code) + if code not in self.mapping: + raise LookupError('no exception for %r' % code) + raise self.mapping[code](*args, **kwargs) + +abort = Aborter() + + +#: an exception that is used internally to signal both a key error and a +#: bad request. Used by a lot of the datastructures. +BadRequestKeyError = BadRequest.wrap(KeyError) + + +# imported here because of circular dependencies of werkzeug.utils +from werkzeug.utils import escape +from werkzeug.http import HTTP_STATUS_CODES diff --git a/deps/werkzeug/filesystem.py b/deps/werkzeug/filesystem.py new file mode 100644 index 00000000..62467465 --- /dev/null +++ b/deps/werkzeug/filesystem.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.filesystem + ~~~~~~~~~~~~~~~~~~~ + + Various utilities for the local filesystem. + + :copyright: (c) 2015 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" + +import codecs +import sys +import warnings + +# We do not trust traditional unixes. +has_likely_buggy_unicode_filesystem = \ + sys.platform.startswith('linux') or 'bsd' in sys.platform + + +def _is_ascii_encoding(encoding): + """ + Given an encoding this figures out if the encoding is actually ASCII (which + is something we don't actually want in most cases). This is necessary + because ASCII comes under many names such as ANSI_X3.4-1968. + """ + if encoding is None: + return False + try: + return codecs.lookup(encoding).name == 'ascii' + except LookupError: + return False + + +class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning): + '''The warning used by Werkzeug to signal a broken filesystem. 
Will only be + used once per runtime.''' + + +_warned_about_filesystem_encoding = False + + +def get_filesystem_encoding(): + """ + Returns the filesystem encoding that should be used. Note that this is + different from the Python understanding of the filesystem encoding which + might be deeply flawed. Do not use this value against Python's unicode APIs + because it might be different. See :ref:`filesystem-encoding` for the exact + behavior. + + The concept of a filesystem encoding in generally is not something you + should rely on. As such if you ever need to use this function except for + writing wrapper code reconsider. + """ + global _warned_about_filesystem_encoding + rv = sys.getfilesystemencoding() + if has_likely_buggy_unicode_filesystem and not rv \ + or _is_ascii_encoding(rv): + if not _warned_about_filesystem_encoding: + warnings.warn( + 'Detected a misconfigured UNIX filesystem: Will use UTF-8 as ' + 'filesystem encoding instead of {0!r}'.format(rv), + BrokenFilesystemWarning) + _warned_about_filesystem_encoding = True + return 'utf-8' + return rv diff --git a/deps/werkzeug/formparser.py b/deps/werkzeug/formparser.py new file mode 100644 index 00000000..11486913 --- /dev/null +++ b/deps/werkzeug/formparser.py @@ -0,0 +1,526 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.formparser + ~~~~~~~~~~~~~~~~~~~ + + This module implements the form parsing. It supports url-encoded forms + as well as non-nested multipart uploads. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. 
+""" +import re +import codecs +from io import BytesIO +from tempfile import TemporaryFile +from itertools import chain, repeat, tee +from functools import update_wrapper + +from werkzeug._compat import to_native, text_type +from werkzeug.urls import url_decode_stream +from werkzeug.wsgi import make_line_iter, \ + get_input_stream, get_content_length +from werkzeug.datastructures import Headers, FileStorage, MultiDict +from werkzeug.http import parse_options_header + + +#: an iterator that yields empty strings +_empty_string_iter = repeat('') + +#: a regular expression for multipart boundaries +_multipart_boundary_re = re.compile('^[ -~]{0,200}[!-~]$') + +#: supported http encodings that are also available in python we support +#: for multipart messages. +_supported_multipart_encodings = frozenset(['base64', 'quoted-printable']) + + +def default_stream_factory(total_content_length, filename, content_type, + content_length=None): + """The stream factory that is used per default.""" + if total_content_length > 1024 * 500: + return TemporaryFile('wb+') + return BytesIO() + + +def parse_form_data(environ, stream_factory=None, charset='utf-8', + errors='replace', max_form_memory_size=None, + max_content_length=None, cls=None, + silent=True): + """Parse the form data in the environ and return it as tuple in the form + ``(stream, form, files)``. You should only call this method if the + transport method is `POST`, `PUT`, or `PATCH`. + + If the mimetype of the data transmitted is `multipart/form-data` the + files multidict will be filled with `FileStorage` objects. If the + mimetype is unknown the input stream is wrapped and returned as first + argument, else the stream is empty. + + This is a shortcut for the common usage of :class:`FormDataParser`. + + Have a look at :ref:`dealing-with-request-data` for more details. + + .. versionadded:: 0.5 + The `max_form_memory_size`, `max_content_length` and + `cls` parameters were added. + + .. 
versionadded:: 0.5.1 + The optional `silent` flag was added. + + :param environ: the WSGI environment to be used for parsing. + :param stream_factory: An optional callable that returns a new read and + writeable file descriptor. This callable works + the same as :meth:`~BaseResponse._get_file_stream`. + :param charset: The character set for URL and url encoded form data. + :param errors: The encoding error behavior. + :param max_form_memory_size: the maximum number of bytes to be accepted for + in-memory stored form data. If the data + exceeds the value specified an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param max_content_length: If this is provided and the transmitted data + is longer than this value an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param silent: If set to False parsing errors will not be caught. + :return: A tuple in the form ``(stream, form, files)``. + """ + return FormDataParser(stream_factory, charset, errors, + max_form_memory_size, max_content_length, + cls, silent).parse_from_environ(environ) + + +def exhaust_stream(f): + """Helper decorator for methods that exhausts the stream on return.""" + + def wrapper(self, stream, *args, **kwargs): + try: + return f(self, stream, *args, **kwargs) + finally: + exhaust = getattr(stream, 'exhaust', None) + if exhaust is not None: + exhaust() + else: + while 1: + chunk = stream.read(1024 * 64) + if not chunk: + break + return update_wrapper(wrapper, f) + + +class FormDataParser(object): + + """This class implements parsing of form data for Werkzeug. By itself + it can parse multipart and url encoded form data. It can be subclassed + and extended but for most mimetypes it is a better idea to use the + untouched stream and expose it as separate attributes on a request + object. + + .. 
versionadded:: 0.8 + + :param stream_factory: An optional callable that returns a new read and + writeable file descriptor. This callable works + the same as :meth:`~BaseResponse._get_file_stream`. + :param charset: The character set for URL and url encoded form data. + :param errors: The encoding error behavior. + :param max_form_memory_size: the maximum number of bytes to be accepted for + in-memory stored form data. If the data + exceeds the value specified an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param max_content_length: If this is provided and the transmitted data + is longer than this value an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param silent: If set to False parsing errors will not be caught. + """ + + def __init__(self, stream_factory=None, charset='utf-8', + errors='replace', max_form_memory_size=None, + max_content_length=None, cls=None, + silent=True): + if stream_factory is None: + stream_factory = default_stream_factory + self.stream_factory = stream_factory + self.charset = charset + self.errors = errors + self.max_form_memory_size = max_form_memory_size + self.max_content_length = max_content_length + if cls is None: + cls = MultiDict + self.cls = cls + self.silent = silent + + def get_parse_func(self, mimetype, options): + return self.parse_functions.get(mimetype) + + def parse_from_environ(self, environ): + """Parses the information from the environment as form data. + + :param environ: the WSGI environment to be used for parsing. + :return: A tuple in the form ``(stream, form, files)``. 
+ """ + content_type = environ.get('CONTENT_TYPE', '') + content_length = get_content_length(environ) + mimetype, options = parse_options_header(content_type) + return self.parse(get_input_stream(environ), mimetype, + content_length, options) + + def parse(self, stream, mimetype, content_length, options=None): + """Parses the information from the given stream, mimetype, + content length and mimetype parameters. + + :param stream: an input stream + :param mimetype: the mimetype of the data + :param content_length: the content length of the incoming data + :param options: optional mimetype parameters (used for + the multipart boundary for instance) + :return: A tuple in the form ``(stream, form, files)``. + """ + if self.max_content_length is not None and \ + content_length is not None and \ + content_length > self.max_content_length: + raise exceptions.RequestEntityTooLarge() + if options is None: + options = {} + + parse_func = self.get_parse_func(mimetype, options) + if parse_func is not None: + try: + return parse_func(self, stream, mimetype, + content_length, options) + except ValueError: + if not self.silent: + raise + + return stream, self.cls(), self.cls() + + @exhaust_stream + def _parse_multipart(self, stream, mimetype, content_length, options): + parser = MultiPartParser(self.stream_factory, self.charset, self.errors, + max_form_memory_size=self.max_form_memory_size, + cls=self.cls) + boundary = options.get('boundary') + if boundary is None: + raise ValueError('Missing boundary') + if isinstance(boundary, text_type): + boundary = boundary.encode('ascii') + form, files = parser.parse(stream, boundary, content_length) + return stream, form, files + + @exhaust_stream + def _parse_urlencoded(self, stream, mimetype, content_length, options): + if self.max_form_memory_size is not None and \ + content_length is not None and \ + content_length > self.max_form_memory_size: + raise exceptions.RequestEntityTooLarge() + form = url_decode_stream(stream, self.charset, + 
errors=self.errors, cls=self.cls) + return stream, form, self.cls() + + #: mapping of mimetypes to parsing functions + parse_functions = { + 'multipart/form-data': _parse_multipart, + 'application/x-www-form-urlencoded': _parse_urlencoded, + 'application/x-url-encoded': _parse_urlencoded + } + + +def is_valid_multipart_boundary(boundary): + """Checks if the string given is a valid multipart boundary.""" + return _multipart_boundary_re.match(boundary) is not None + + +def _line_parse(line): + """Removes line ending characters and returns a tuple (`stripped_line`, + `is_terminated`). + """ + if line[-2:] in ['\r\n', b'\r\n']: + return line[:-2], True + elif line[-1:] in ['\r', '\n', b'\r', b'\n']: + return line[:-1], True + return line, False + + +def parse_multipart_headers(iterable): + """Parses multipart headers from an iterable that yields lines (including + the trailing newline symbol). The iterable has to be newline terminated. + + The iterable will stop at the line where the headers ended so it can be + further consumed. + + :param iterable: iterable of strings that are newline terminated + """ + result = [] + for line in iterable: + line = to_native(line) + line, line_terminated = _line_parse(line) + if not line_terminated: + raise ValueError('unexpected end of line in multipart header') + if not line: + break + elif line[0] in ' \t' and result: + key, value = result[-1] + result[-1] = (key, value + '\n ' + line[1:]) + else: + parts = line.split(':', 1) + if len(parts) == 2: + result.append((parts[0].strip(), parts[1].strip())) + + # we link the list to the headers, no need to create a copy, the + # list was not shared anyways. 
+ return Headers(result) + + +_begin_form = 'begin_form' +_begin_file = 'begin_file' +_cont = 'cont' +_end = 'end' + + +class MultiPartParser(object): + + def __init__(self, stream_factory=None, charset='utf-8', errors='replace', + max_form_memory_size=None, cls=None, buffer_size=64 * 1024): + self.stream_factory = stream_factory + self.charset = charset + self.errors = errors + self.max_form_memory_size = max_form_memory_size + if stream_factory is None: + stream_factory = default_stream_factory + if cls is None: + cls = MultiDict + self.cls = cls + + # make sure the buffer size is divisible by four so that we can base64 + # decode chunk by chunk + assert buffer_size % 4 == 0, 'buffer size has to be divisible by 4' + # also the buffer size has to be at least 1024 bytes long or long headers + # will freak out the system + assert buffer_size >= 1024, 'buffer size has to be at least 1KB' + + self.buffer_size = buffer_size + + def _fix_ie_filename(self, filename): + """Internet Explorer 6 transmits the full file name if a file is + uploaded. This function strips the full path if it thinks the + filename is Windows-like absolute. + """ + if filename[1:3] == ':\\' or filename[:2] == '\\\\': + return filename.split('\\')[-1] + return filename + + def _find_terminator(self, iterator): + """The terminator might have some additional newlines before it. + There is at least one application that sends additional newlines + before headers (the python setuptools package). 
+ """ + for line in iterator: + if not line: + break + line = line.strip() + if line: + return line + return b'' + + def fail(self, message): + raise ValueError(message) + + def get_part_encoding(self, headers): + transfer_encoding = headers.get('content-transfer-encoding') + if transfer_encoding is not None and \ + transfer_encoding in _supported_multipart_encodings: + return transfer_encoding + + def get_part_charset(self, headers): + # Figure out input charset for current part + content_type = headers.get('content-type') + if content_type: + mimetype, ct_params = parse_options_header(content_type) + return ct_params.get('charset', self.charset) + return self.charset + + def start_file_streaming(self, filename, headers, total_content_length): + if isinstance(filename, bytes): + filename = filename.decode(self.charset, self.errors) + filename = self._fix_ie_filename(filename) + content_type = headers.get('content-type') + try: + content_length = int(headers['content-length']) + except (KeyError, ValueError): + content_length = 0 + container = self.stream_factory(total_content_length, content_type, + filename, content_length) + return filename, container + + def in_memory_threshold_reached(self, bytes): + raise exceptions.RequestEntityTooLarge() + + def validate_boundary(self, boundary): + if not boundary: + self.fail('Missing boundary') + if not is_valid_multipart_boundary(boundary): + self.fail('Invalid boundary: %s' % boundary) + if len(boundary) > self.buffer_size: # pragma: no cover + # this should never happen because we check for a minimum size + # of 1024 and boundaries may not be longer than 200. The only + # situation when this happens is for non debug builds where + # the assert is skipped. 
+ self.fail('Boundary longer than buffer size') + + def parse_lines(self, file, boundary, content_length, cap_at_buffer=True): + """Generate parts of + ``('begin_form', (headers, name))`` + ``('begin_file', (headers, name, filename))`` + ``('cont', bytestring)`` + ``('end', None)`` + + Always obeys the grammar + parts = ( begin_form cont* end | + begin_file cont* end )* + """ + next_part = b'--' + boundary + last_part = next_part + b'--' + + iterator = chain(make_line_iter(file, limit=content_length, + buffer_size=self.buffer_size, + cap_at_buffer=cap_at_buffer), + _empty_string_iter) + + terminator = self._find_terminator(iterator) + + if terminator == last_part: + return + elif terminator != next_part: + self.fail('Expected boundary at start of multipart data') + + while terminator != last_part: + headers = parse_multipart_headers(iterator) + + disposition = headers.get('content-disposition') + if disposition is None: + self.fail('Missing Content-Disposition header') + disposition, extra = parse_options_header(disposition) + transfer_encoding = self.get_part_encoding(headers) + name = extra.get('name') + filename = extra.get('filename') + + # if no content type is given we stream into memory. A list is + # used as a temporary container. + if filename is None: + yield _begin_form, (headers, name) + + # otherwise we parse the rest of the headers and ask the stream + # factory for something we can write in. + else: + yield _begin_file, (headers, name, filename) + + buf = b'' + for line in iterator: + if not line: + self.fail('unexpected end of stream') + + if line[:2] == b'--': + terminator = line.rstrip() + if terminator in (next_part, last_part): + break + + if transfer_encoding is not None: + if transfer_encoding == 'base64': + transfer_encoding = 'base64_codec' + try: + line = codecs.decode(line, transfer_encoding) + except Exception: + self.fail('could not decode transfer encoded chunk') + + # we have something in the buffer from the last iteration. 
+ # this is usually a newline delimiter. + if buf: + yield _cont, buf + buf = b'' + + # If the line ends with windows CRLF we write everything except + # the last two bytes. In all other cases however we write + # everything except the last byte. If it was a newline, that's + # fine, otherwise it does not matter because we will write it + # the next iteration. this ensures we do not write the + # final newline into the stream. That way we do not have to + # truncate the stream. However we do have to make sure that + # if something else than a newline is in there we write it + # out. + if line[-2:] == b'\r\n': + buf = b'\r\n' + cutoff = -2 + else: + buf = line[-1:] + cutoff = -1 + yield _cont, line[:cutoff] + + else: # pragma: no cover + raise ValueError('unexpected end of part') + + # if we have a leftover in the buffer that is not a newline + # character we have to flush it, otherwise we will chop of + # certain values. + if buf not in (b'', b'\r', b'\n', b'\r\n'): + yield _cont, buf + + yield _end, None + + def parse_parts(self, file, boundary, content_length): + """Generate ``('file', (name, val))`` and + ``('form', (name, val))`` parts. + """ + in_memory = 0 + + for ellt, ell in self.parse_lines(file, boundary, content_length): + if ellt == _begin_file: + headers, name, filename = ell + is_file = True + guard_memory = False + filename, container = self.start_file_streaming( + filename, headers, content_length) + _write = container.write + + elif ellt == _begin_form: + headers, name = ell + is_file = False + container = [] + _write = container.append + guard_memory = self.max_form_memory_size is not None + + elif ellt == _cont: + _write(ell) + # if we write into memory and there is a memory size limit we + # count the number of bytes in memory and raise an exception if + # there is too much data in memory. 
+ if guard_memory: + in_memory += len(ell) + if in_memory > self.max_form_memory_size: + self.in_memory_threshold_reached(in_memory) + + elif ellt == _end: + if is_file: + container.seek(0) + yield ('file', + (name, FileStorage(container, filename, name, + headers=headers))) + else: + part_charset = self.get_part_charset(headers) + yield ('form', + (name, b''.join(container).decode( + part_charset, self.errors))) + + def parse(self, file, boundary, content_length): + formstream, filestream = tee( + self.parse_parts(file, boundary, content_length), 2) + form = (p[1] for p in formstream if p[0] == 'form') + files = (p[1] for p in filestream if p[0] == 'file') + return self.cls(form), self.cls(files) + + +from werkzeug import exceptions diff --git a/deps/werkzeug/http.py b/deps/werkzeug/http.py new file mode 100644 index 00000000..13a7eb5b --- /dev/null +++ b/deps/werkzeug/http.py @@ -0,0 +1,1022 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.http + ~~~~~~~~~~~~~ + + Werkzeug comes with a bunch of utilities that help Werkzeug to deal with + HTTP data. Most of the classes and functions provided by this module are + used by the wrappers, but they are useful on their own, too, especially if + the response and request objects are not used. + + This covers some of the more HTTP centric features of WSGI, some other + utilities such as cookie handling are documented in the `werkzeug.utils` + module. + + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. 
+""" +import re +from time import time, gmtime +try: + from email.utils import parsedate_tz +except ImportError: # pragma: no cover + from email.Utils import parsedate_tz +try: + from urllib2 import parse_http_list as _parse_list_header +except ImportError: # pragma: no cover + from urllib.request import parse_http_list as _parse_list_header +from datetime import datetime, timedelta +from hashlib import md5 +import base64 + +from werkzeug._internal import _cookie_quote, _make_cookie_domain, \ + _cookie_parse_impl +from werkzeug._compat import to_unicode, iteritems, text_type, \ + string_types, try_coerce_native, to_bytes, PY2, \ + integer_types + + +_cookie_charset = 'latin1' +# for explanation of "media-range", etc. see Sections 5.3.{1,2} of RFC 7231 +_accept_re = re.compile( + r'''( # media-range capturing-parenthesis + [^\s;,]+ # type/subtype + (?:[ \t]*;[ \t]* # ";" + (?: # parameter non-capturing-parenthesis + [^\s;,q][^\s;,]* # token that doesn't start with "q" + | # or + q[^\s;,=][^\s;,]* # token that is more than just "q" + ) + )* # zero or more parameters + ) # end of media-range + (?:[ \t]*;[ \t]*q= # weight is a "q" parameter + (\d*(?:\.\d+)?) # qvalue capturing-parentheses + [^,]* # "extension" accept params: who cares? + )? 
# accept params are optional + ''', re.VERBOSE) +_token_chars = frozenset("!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" + '^_`abcdefghijklmnopqrstuvwxyz|~') +_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)') +_unsafe_header_chars = set('()<>@,;:\"/[]?={} \t') +_quoted_string_re = r'"[^"\\]*(?:\\.[^"\\]*)*"' +_option_header_piece_re = re.compile( + r';\s*(%s|[^\s;,=]+)\s*(?:=\s*(%s|[^;,]+)?)?\s*' % + (_quoted_string_re, _quoted_string_re) +) +_option_header_start_mime_type = re.compile(r',\s*([^;,\s]+)([;,]\s*.+)?') + +_entity_headers = frozenset([ + 'allow', 'content-encoding', 'content-language', 'content-length', + 'content-location', 'content-md5', 'content-range', 'content-type', + 'expires', 'last-modified' +]) +_hop_by_hop_headers = frozenset([ + 'connection', 'keep-alive', 'proxy-authenticate', + 'proxy-authorization', 'te', 'trailer', 'transfer-encoding', + 'upgrade' +]) + + +HTTP_STATUS_CODES = { + 100: 'Continue', + 101: 'Switching Protocols', + 102: 'Processing', + 200: 'OK', + 201: 'Created', + 202: 'Accepted', + 203: 'Non Authoritative Information', + 204: 'No Content', + 205: 'Reset Content', + 206: 'Partial Content', + 207: 'Multi Status', + 226: 'IM Used', # see RFC 3229 + 300: 'Multiple Choices', + 301: 'Moved Permanently', + 302: 'Found', + 303: 'See Other', + 304: 'Not Modified', + 305: 'Use Proxy', + 307: 'Temporary Redirect', + 400: 'Bad Request', + 401: 'Unauthorized', + 402: 'Payment Required', # unused + 403: 'Forbidden', + 404: 'Not Found', + 405: 'Method Not Allowed', + 406: 'Not Acceptable', + 407: 'Proxy Authentication Required', + 408: 'Request Timeout', + 409: 'Conflict', + 410: 'Gone', + 411: 'Length Required', + 412: 'Precondition Failed', + 413: 'Request Entity Too Large', + 414: 'Request URI Too Long', + 415: 'Unsupported Media Type', + 416: 'Requested Range Not Satisfiable', + 417: 'Expectation Failed', + 418: 'I\'m a teapot', # see RFC 2324 + 422: 'Unprocessable Entity', + 423: 'Locked', + 424: 'Failed 
Dependency', + 426: 'Upgrade Required', + 428: 'Precondition Required', # see RFC 6585 + 429: 'Too Many Requests', + 431: 'Request Header Fields Too Large', + 449: 'Retry With', # proprietary MS extension + 500: 'Internal Server Error', + 501: 'Not Implemented', + 502: 'Bad Gateway', + 503: 'Service Unavailable', + 504: 'Gateway Timeout', + 505: 'HTTP Version Not Supported', + 507: 'Insufficient Storage', + 510: 'Not Extended' +} + + +def wsgi_to_bytes(data): + """coerce wsgi unicode represented bytes to real ones + + """ + if isinstance(data, bytes): + return data + return data.encode('latin1') # XXX: utf8 fallback? + + +def bytes_to_wsgi(data): + assert isinstance(data, bytes), 'data must be bytes' + if isinstance(data, str): + return data + else: + return data.decode('latin1') + + +def quote_header_value(value, extra_chars='', allow_token=True): + """Quote a header value if necessary. + + .. versionadded:: 0.5 + + :param value: the value to quote. + :param extra_chars: a list of extra characters to skip quoting. + :param allow_token: if this is enabled token values are returned + unchanged. + """ + if isinstance(value, bytes): + value = bytes_to_wsgi(value) + value = str(value) + if allow_token: + token_chars = _token_chars | set(extra_chars) + if set(value).issubset(token_chars): + return value + return '"%s"' % value.replace('\\', '\\\\').replace('"', '\\"') + + +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + .. versionadded:: 0.5 + + :param value: the header value to unquote. + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. 
IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != '\\\\': + return value.replace('\\\\', '\\').replace('\\"', '"') + return value + + +def dump_options_header(header, options): + """The reverse function to :func:`parse_options_header`. + + :param header: the header to dump + :param options: a dict of options to append. + """ + segments = [] + if header is not None: + segments.append(header) + for key, value in iteritems(options): + if value is None: + segments.append(key) + else: + segments.append('%s=%s' % (key, quote_header_value(value))) + return '; '.join(segments) + + +def dump_header(iterable, allow_token=True): + """Dump an HTTP header again. This is the reversal of + :func:`parse_list_header`, :func:`parse_set_header` and + :func:`parse_dict_header`. This also quotes strings that include an + equals sign unless you pass it as dict of key, value pairs. + + >>> dump_header({'foo': 'bar baz'}) + 'foo="bar baz"' + >>> dump_header(('foo', 'bar baz')) + 'foo, "bar baz"' + + :param iterable: the iterable or dict of values to quote. + :param allow_token: if set to `False` tokens as values are disallowed. + See :func:`quote_header_value` for more details. 
+ """ + if isinstance(iterable, dict): + items = [] + for key, value in iteritems(iterable): + if value is None: + items.append(key) + else: + items.append('%s=%s' % ( + key, + quote_header_value(value, allow_token=allow_token) + )) + else: + items = [quote_header_value(x, allow_token=allow_token) + for x in iterable] + return ', '.join(items) + + +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. + + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +def parse_dict_header(value, cls=dict): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict (or any other mapping object created from + the type with a dict like interface provided by the `cls` arugment): + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + .. 
versionchanged:: 0.9 + Added support for `cls` argument. + + :param value: a string with a dict header. + :param cls: callable to use for storage of parsed results. + :return: an instance of `cls` + """ + result = cls() + if not isinstance(value, text_type): + # XXX: validate + value = bytes_to_wsgi(value) + for item in _parse_list_header(value): + if '=' not in item: + result[item] = None + continue + name, value = item.split('=', 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +def parse_options_header(value, multiple=False): + """Parse a ``Content-Type`` like header into a tuple with the content + type and the options: + + >>> parse_options_header('text/html; charset=utf8') + ('text/html', {'charset': 'utf8'}) + + This should not be used to parse ``Cache-Control`` like headers that use + a slightly different format. For these headers use the + :func:`parse_dict_header` function. + + .. versionadded:: 0.5 + + :param value: the header to parse. 
+ :param multiple: Whether try to parse and return multiple MIME types + :return: (mimetype, options) or (mimetype, options, mimetype, options, …) + if multiple=True + """ + if not value: + return '', {} + + result = [] + + value = "," + value.replace("\n", ",") + while value: + match = _option_header_start_mime_type.match(value) + if not match: + break + result.append(match.group(1)) # mimetype + options = {} + # Parse options + rest = match.group(2) + while rest: + optmatch = _option_header_piece_re.match(rest) + if not optmatch: + break + option, option_value = optmatch.groups() + option = unquote_header_value(option) + if option_value is not None: + option_value = unquote_header_value( + option_value, + option == 'filename') + options[option] = option_value + rest = rest[optmatch.end():] + result.append(options) + if multiple is False: + return tuple(result) + value = rest + + return tuple(result) if result else ('', {}) + + +def parse_accept_header(value, cls=None): + """Parses an HTTP Accept-* header. This does not implement a complete + valid algorithm but one that supports at least value and quality + extraction. + + Returns a new :class:`Accept` object (basically a list of ``(value, quality)`` + tuples sorted by the quality with some additional accessor methods). + + The second parameter can be a subclass of :class:`Accept` that is created + with the parsed values and returned. + + :param value: the accept header string to be parsed. + :param cls: the wrapper class for the return value (can be + :class:`Accept` or a subclass thereof) + :return: an instance of `cls`. 
+ """ + if cls is None: + cls = Accept + + if not value: + return cls(None) + + result = [] + for match in _accept_re.finditer(value): + quality = match.group(2) + if not quality: + quality = 1 + else: + quality = max(min(float(quality), 1), 0) + result.append((match.group(1), quality)) + return cls(result) + + +def parse_cache_control_header(value, on_update=None, cls=None): + """Parse a cache control header. The RFC differs between response and + request cache control, this method does not. It's your responsibility + to not use the wrong control statements. + + .. versionadded:: 0.5 + The `cls` was added. If not specified an immutable + :class:`~werkzeug.datastructures.RequestCacheControl` is returned. + + :param value: a cache control header to be parsed. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.CacheControl` + object is changed. + :param cls: the class for the returned object. By default + :class:`~werkzeug.datastructures.RequestCacheControl` is used. + :return: a `cls` object. + """ + if cls is None: + cls = RequestCacheControl + if not value: + return cls(None, on_update) + return cls(parse_dict_header(value), on_update) + + +def parse_set_header(value, on_update=None): + """Parse a set-like header and return a + :class:`~werkzeug.datastructures.HeaderSet` object: + + >>> hs = parse_set_header('token, "quoted value"') + + The return value is an object that treats the items case-insensitively + and keeps the order of the items: + + >>> 'TOKEN' in hs + True + >>> hs.index('quoted value') + 1 + >>> hs + HeaderSet(['token', 'quoted value']) + + To create a header from the :class:`HeaderSet` again, use the + :func:`dump_header` function. + + :param value: a set header to be parsed. + :param on_update: an optional callable that is called every time a + value on the :class:`~werkzeug.datastructures.HeaderSet` + object is changed. 
+ :return: a :class:`~werkzeug.datastructures.HeaderSet` + """ + if not value: + return HeaderSet(None, on_update) + return HeaderSet(parse_list_header(value), on_update) + + +def parse_authorization_header(value): + """Parse an HTTP basic/digest authorization header transmitted by the web + browser. The return value is either `None` if the header was invalid or + not given, otherwise an :class:`~werkzeug.datastructures.Authorization` + object. + + :param value: the authorization header to parse. + :return: a :class:`~werkzeug.datastructures.Authorization` object or `None`. + """ + if not value: + return + value = wsgi_to_bytes(value) + try: + auth_type, auth_info = value.split(None, 1) + auth_type = auth_type.lower() + except ValueError: + return + if auth_type == b'basic': + try: + username, password = base64.b64decode(auth_info).split(b':', 1) + except Exception: + return + return Authorization('basic', {'username': bytes_to_wsgi(username), + 'password': bytes_to_wsgi(password)}) + elif auth_type == b'digest': + auth_map = parse_dict_header(auth_info) + for key in 'username', 'realm', 'nonce', 'uri', 'response': + if key not in auth_map: + return + if 'qop' in auth_map: + if not auth_map.get('nc') or not auth_map.get('cnonce'): + return + return Authorization('digest', auth_map) + + +def parse_www_authenticate_header(value, on_update=None): + """Parse an HTTP WWW-Authenticate header into a + :class:`~werkzeug.datastructures.WWWAuthenticate` object. + + :param value: a WWW-Authenticate header to parse. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.WWWAuthenticate` + object is changed. + :return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object. 
+ """ + if not value: + return WWWAuthenticate(on_update=on_update) + try: + auth_type, auth_info = value.split(None, 1) + auth_type = auth_type.lower() + except (ValueError, AttributeError): + return WWWAuthenticate(value.strip().lower(), on_update=on_update) + return WWWAuthenticate(auth_type, parse_dict_header(auth_info), + on_update) + + +def parse_if_range_header(value): + """Parses an if-range header which can be an etag or a date. Returns + a :class:`~werkzeug.datastructures.IfRange` object. + + .. versionadded:: 0.7 + """ + if not value: + return IfRange() + date = parse_date(value) + if date is not None: + return IfRange(date=date) + # drop weakness information + return IfRange(unquote_etag(value)[0]) + + +def parse_range_header(value, make_inclusive=True): + """Parses a range header into a :class:`~werkzeug.datastructures.Range` + object. If the header is missing or malformed `None` is returned. + `ranges` is a list of ``(start, stop)`` tuples where the ranges are + non-inclusive. + + .. versionadded:: 0.7 + """ + if not value or '=' not in value: + return None + + ranges = [] + last_end = 0 + units, rng = value.split('=', 1) + units = units.strip().lower() + + for item in rng.split(','): + item = item.strip() + if '-' not in item: + return None + if item.startswith('-'): + if last_end < 0: + return None + begin = int(item) + end = None + last_end = -1 + elif '-' in item: + begin, end = item.split('-', 1) + begin = int(begin) + if begin < last_end or last_end < 0: + return None + if end: + end = int(end) + 1 + if begin >= end: + return None + else: + end = None + last_end = end + ranges.append((begin, end)) + + return Range(units, ranges) + + +def parse_content_range_header(value, on_update=None): + """Parses a range header into a + :class:`~werkzeug.datastructures.ContentRange` object or `None` if + parsing is not possible. + + .. versionadded:: 0.7 + + :param value: a content range header to be parsed. 
def quote_etag(etag, weak=False):
    """Return *etag* wrapped in double quotes, optionally tagged weak.

    :param etag: the etag to quote.
    :param weak: set to `True` to tag it "weak".
    :raises ValueError: if the etag itself contains a double quote.
    """
    if '"' in etag:
        raise ValueError('invalid etag')
    quoted = '"%s"' % etag
    return 'W/' + quoted if weak else quoted


def unquote_etag(etag):
    """Strip the quotes (and any ``W/`` weakness marker) from an etag.

    >>> unquote_etag('W/"bar"')
    ('bar', True)
    >>> unquote_etag('"bar"')
    ('bar', False)

    :param etag: the etag identifier to unquote.
    :return: a ``(etag, weak)`` tuple, or ``(None, None)`` for empty input.
    """
    if not etag:
        return None, None
    tag = etag.strip()
    weak = tag.startswith(('W/', 'w/'))
    if weak:
        tag = tag[2:]
    if tag[:1] == tag[-1:] == '"':
        tag = tag[1:-1]
    return tag, weak


def parse_etags(value):
    """Parse an etag header (``If-Match`` / ``If-None-Match`` style).

    :param value: the tag header to parse
    :return: an :class:`~werkzeug.datastructures.ETags` object.
    """
    if not value:
        return ETags()
    strong, weak = [], []
    pos, end = 0, len(value)
    while pos < end:
        match = _etag_re.match(value, pos)
        if match is None:
            break
        is_weak, quoted, raw = match.groups()
        if raw == '*':
            # "*" matches anything -> represented by the star tag
            return ETags(star_tag=True)
        tag = quoted or raw
        (weak if is_weak else strong).append(tag)
        pos = match.end()
    return ETags(strong, weak)


def generate_etag(data):
    """Generate an etag for some data (the MD5 hex digest)."""
    return md5(data).hexdigest()


def parse_date(value):
    """Parse one of the supported HTTP date formats into a naive UTC
    :class:`datetime.datetime`:

    .. sourcecode:: text

        Sun, 06 Nov 1994 08:49:37 GMT  ; RFC 822, updated by RFC 1123
        Sunday, 06-Nov-94 08:49:37 GMT ; RFC 850, obsoleted by RFC 1036
        Sun Nov  6 08:49:37 1994       ; ANSI C's asctime() format

    If parsing fails the return value is `None`.

    :param value: a string with a supported date format.
    :return: a :class:`datetime.datetime` object, or ``None``.
    """
    if not value:
        return None
    parts = parsedate_tz(value.strip())
    if parts is None:
        return None
    year = parts[0]
    # parsedate_tz cannot distinguish two-digit years from zero-padded
    # four-digit ones, so map 0-68 to the 2000s and 69-99 to the 1900s
    if 0 <= year <= 68:
        year += 2000
    elif 69 <= year <= 99:
        year += 1900
    try:
        return datetime(year, *parts[1:7]) - timedelta(seconds=parts[-1] or 0)
    except (ValueError, OverflowError):
        return None


def _dump_date(d, delim):
    """Shared formatter behind `http_date` and `cookie_date`."""
    if d is None:
        d = gmtime()
    elif isinstance(d, datetime):
        d = d.utctimetuple()
    elif isinstance(d, (integer_types, float)):
        d = gmtime(d)
    weekday = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')[d.tm_wday]
    month = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug',
             'Sep', 'Oct', 'Nov', 'Dec')[d.tm_mon - 1]
    return '%s, %02d%s%s%s%s %02d:%02d:%02d GMT' % (
        weekday, d.tm_mday, delim, month, delim, str(d.tm_year),
        d.tm_hour, d.tm_min, d.tm_sec)


def cookie_date(expires=None):
    """Format a time for Netscape's cookie standard
    (``Wdy, DD-Mon-YYYY HH:MM:SS GMT``).

    Accepts a floating point number expressed in seconds since the
    epoch, a datetime object or a timetuple; ``None`` means "now".
    All times are UTC. :func:`parse_date` can parse such a date.

    :param expires: If provided that date is used, otherwise the current.
    """
    return _dump_date(expires, '-')


def http_date(timestamp=None):
    """Format a time to match the RFC 1123 date format
    (``Wdy, DD Mon YYYY HH:MM:SS GMT``).

    Accepts a floating point number expressed in seconds since the
    epoch, a datetime object or a timetuple; ``None`` means "now".
    All times are UTC. :func:`parse_date` can parse such a date.

    :param timestamp: If provided that date is used, otherwise the current.
    """
    return _dump_date(timestamp, ' ')
+ + :param environ: the WSGI environment of the request to be checked. + :param etag: the etag for the response for comparison. + :param data: or alternatively the data of the response to automatically + generate an etag using :func:`generate_etag`. + :param last_modified: an optional date of the last modification. + :return: `True` if the resource was modified, otherwise `False`. + """ + if etag is None and data is not None: + etag = generate_etag(data) + elif data is not None: + raise TypeError('both data and etag given') + if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'): + return False + + unmodified = False + if isinstance(last_modified, string_types): + last_modified = parse_date(last_modified) + + # ensure that microsecond is zero because the HTTP spec does not transmit + # that either and we might have some false positives. See issue #39 + if last_modified is not None: + last_modified = last_modified.replace(microsecond=0) + + modified_since = parse_date(environ.get('HTTP_IF_MODIFIED_SINCE')) + + if modified_since and last_modified and last_modified <= modified_since: + unmodified = True + if etag: + if_none_match = parse_etags(environ.get('HTTP_IF_NONE_MATCH')) + if if_none_match: + # http://tools.ietf.org/html/rfc7232#section-3.2 + # "A recipient MUST use the weak comparison function when comparing + # entity-tags for If-None-Match" + etag, _ = unquote_etag(etag) + unmodified = if_none_match.contains_weak(etag) + + return not unmodified + + +def remove_entity_headers(headers, allowed=('expires', 'content-location')): + """Remove all entity headers from a list or :class:`Headers` object. This + operation works in-place. `Expires` and `Content-Location` headers are + by default not removed. The reason for this is :rfc:`2616` section + 10.3.5 which specifies some entity headers that should be sent. + + .. versionchanged:: 0.5 + added `allowed` parameter. + + :param headers: a list or :class:`Headers` object. 
+ :param allowed: a list of headers that should still be allowed even though + they are entity headers. + """ + allowed = set(x.lower() for x in allowed) + headers[:] = [(key, value) for key, value in headers if + not is_entity_header(key) or key.lower() in allowed] + + +def remove_hop_by_hop_headers(headers): + """Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or + :class:`Headers` object. This operation works in-place. + + .. versionadded:: 0.5 + + :param headers: a list or :class:`Headers` object. + """ + headers[:] = [(key, value) for key, value in headers if + not is_hop_by_hop_header(key)] + + +def is_entity_header(header): + """Check if a header is an entity header. + + .. versionadded:: 0.5 + + :param header: the header to test. + :return: `True` if it's an entity header, `False` otherwise. + """ + return header.lower() in _entity_headers + + +def is_hop_by_hop_header(header): + """Check if a header is an HTTP/1.1 "Hop-by-Hop" header. + + .. versionadded:: 0.5 + + :param header: the header to test. + :return: `True` if it's an entity header, `False` otherwise. + """ + return header.lower() in _hop_by_hop_headers + + +def parse_cookie(header, charset='utf-8', errors='replace', cls=None): + """Parse a cookie. Either from a string or WSGI environ. + + Per default encoding errors are ignored. If you want a different behavior + you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a + :exc:`HTTPUnicodeError` is raised. + + .. versionchanged:: 0.5 + This function now returns a :class:`TypeConversionDict` instead of a + regular dict. The `cls` parameter was added. + + :param header: the header to be used to parse the cookie. Alternatively + this can be a WSGI environment. + :param charset: the charset for the cookie values. + :param errors: the error behavior for the charset decoding. + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`TypeConversionDict` is + used. 
+ """ + if isinstance(header, dict): + header = header.get('HTTP_COOKIE', '') + elif header is None: + header = '' + + # If the value is an unicode string it's mangled through latin1. This + # is done because on PEP 3333 on Python 3 all headers are assumed latin1 + # which however is incorrect for cookies, which are sent in page encoding. + # As a result we + if isinstance(header, text_type): + header = header.encode('latin1', 'replace') + + if cls is None: + cls = TypeConversionDict + + def _parse_pairs(): + for key, val in _cookie_parse_impl(header): + key = to_unicode(key, charset, errors, allow_none_charset=True) + val = to_unicode(val, charset, errors, allow_none_charset=True) + yield try_coerce_native(key), val + + return cls(_parse_pairs()) + + +def dump_cookie(key, value='', max_age=None, expires=None, path='/', + domain=None, secure=False, httponly=False, + charset='utf-8', sync_expires=True): + """Creates a new Set-Cookie header without the ``Set-Cookie`` prefix + The parameters are the same as in the cookie Morsel object in the + Python standard library but it accepts unicode data, too. + + On Python 3 the return value of this function will be a unicode + string, on Python 2 it will be a native string. In both cases the + return value is usually restricted to ascii as the vast majority of + values are properly escaped, but that is no guarantee. If a unicode + string is returned it's tunneled through latin1 as required by + PEP 3333. + + The return value is not ASCII safe if the key contains unicode + characters. This is technically against the specification but + happens in the wild. It's strongly recommended to not use + non-ASCII values for the keys. + + :param max_age: should be a number of seconds, or `None` (default) if + the cookie should last only as long as the client's + browser session. Additionally `timedelta` objects + are accepted, too. + :param expires: should be a `datetime` object or unix timestamp. 
+ :param path: limits the cookie to a given path, per default it will + span the whole domain. + :param domain: Use this if you want to set a cross-domain cookie. For + example, ``domain=".example.com"`` will set a cookie + that is readable by the domain ``www.example.com``, + ``foo.example.com`` etc. Otherwise, a cookie will only + be readable by the domain that set it. + :param secure: The cookie will only be available via HTTPS + :param httponly: disallow JavaScript to access the cookie. This is an + extension to the cookie standard and probably not + supported by all browsers. + :param charset: the encoding for unicode values. + :param sync_expires: automatically set expires if max_age is defined + but expires not. + """ + key = to_bytes(key, charset) + value = to_bytes(value, charset) + + if path is not None: + path = iri_to_uri(path, charset) + domain = _make_cookie_domain(domain) + if isinstance(max_age, timedelta): + max_age = (max_age.days * 60 * 60 * 24) + max_age.seconds + if expires is not None: + if not isinstance(expires, string_types): + expires = cookie_date(expires) + elif max_age is not None and sync_expires: + expires = to_bytes(cookie_date(time() + max_age)) + + buf = [key + b'=' + _cookie_quote(value)] + + # XXX: In theory all of these parameters that are not marked with `None` + # should be quoted. Because stdlib did not quote it before I did not + # want to introduce quoting there now. 
def is_byte_range_valid(start, stop, length):
    """Tell whether a ``(start, stop)`` byte content range is valid for
    a resource of the given *length*.

    ``start``/``stop`` must be both set or both ``None`` (a fully open
    range); ``length`` may be ``None`` when the total size is unknown.

    .. versionadded:: 0.7
    """
    if (start is None) != (stop is None):
        # half-open pairs are never valid
        return False
    if start is None:
        return length is None or length >= 0
    if length is None:
        return 0 <= start < stop
    if start >= stop:
        return False
    return 0 <= start < length
+""" +import copy +from functools import update_wrapper +from werkzeug.wsgi import ClosingIterator +from werkzeug._compat import PY2, implements_bool + +# since each thread has its own greenlet we can just use those as identifiers +# for the context. If greenlets are not available we fall back to the +# current thread ident depending on where it is. +try: + from greenlet import getcurrent as get_ident +except ImportError: + try: + from thread import get_ident + except ImportError: + from _thread import get_ident + + +def release_local(local): + """Releases the contents of the local for the current context. + This makes it possible to use locals without a manager. + + Example:: + + >>> loc = Local() + >>> loc.foo = 42 + >>> release_local(loc) + >>> hasattr(loc, 'foo') + False + + With this function one can release :class:`Local` objects as well + as :class:`LocalStack` objects. However it is not possible to + release data held by proxies that way, one always has to retain + a reference to the underlying local object in order to be able + to release it. + + .. 
class Local(object):
    """A context-local attribute store keyed by the current thread or
    greenlet identity (via ``__ident_func__``).
    """

    __slots__ = ('__storage__', '__ident_func__')

    def __init__(self):
        # object.__setattr__ bypasses our own __setattr__ override below
        object.__setattr__(self, '__storage__', {})
        object.__setattr__(self, '__ident_func__', get_ident)

    def __iter__(self):
        return iter(self.__storage__.items())

    def __call__(self, proxy):
        """Create a proxy for a name."""
        return LocalProxy(self, proxy)

    def __release_local__(self):
        # drop all data stored for the current context
        self.__storage__.pop(self.__ident_func__(), None)

    def __getattr__(self, name):
        ident = self.__ident_func__()
        try:
            return self.__storage__[ident][name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        self.__storage__.setdefault(self.__ident_func__(), {})[name] = value

    def __delattr__(self, name):
        ident = self.__ident_func__()
        try:
            del self.__storage__[ident][name]
        except KeyError:
            raise AttributeError(name)


class LocalStack(object):

    """Works like :class:`Local` but keeps a stack of objects per
    context instead of a flat namespace::

        >>> ls = LocalStack()
        >>> ls.push(42)
        >>> ls.top
        42
        >>> ls.push(23)
        >>> ls.top
        23
        >>> ls.pop()
        23
        >>> ls.top
        42

    Stacks can be force-released with a :class:`LocalManager` or
    :func:`release_local`, but the correct way is to pop each item after
    use; once empty the stack is unbound from the current context.

    Calling the stack without arguments returns a proxy that resolves to
    the topmost item.

    .. versionadded:: 0.6.1
    """

    def __init__(self):
        self._local = Local()

    def __release_local__(self):
        self._local.__release_local__()

    def _get__ident_func__(self):
        return self._local.__ident_func__

    def _set__ident_func__(self, value):
        object.__setattr__(self._local, '__ident_func__', value)
    __ident_func__ = property(_get__ident_func__, _set__ident_func__)
    del _get__ident_func__, _set__ident_func__

    def __call__(self):
        def _lookup():
            current = self.top
            if current is None:
                raise RuntimeError('object unbound')
            return current
        return LocalProxy(_lookup)

    def push(self, obj):
        """Push a new item onto the stack; returns the stack list."""
        stack = getattr(self._local, 'stack', None)
        if stack is None:
            stack = []
            self._local.stack = stack
        stack.append(obj)
        return stack

    def pop(self):
        """Remove the topmost item and return it, or ``None`` if the
        stack was already empty. Removing the last item also releases
        the context binding.
        """
        stack = getattr(self._local, 'stack', None)
        if stack is None:
            return None
        if len(stack) == 1:
            release_local(self._local)
            return stack[-1]
        return stack.pop()

    @property
    def top(self):
        """The topmost item on the stack, or ``None`` when empty."""
        try:
            return self._local.stack[-1]
        except (AttributeError, IndexError):
            return None
+ """ + + def __init__(self, locals=None, ident_func=None): + if locals is None: + self.locals = [] + elif isinstance(locals, Local): + self.locals = [locals] + else: + self.locals = list(locals) + if ident_func is not None: + self.ident_func = ident_func + for local in self.locals: + object.__setattr__(local, '__ident_func__', ident_func) + else: + self.ident_func = get_ident + + def get_ident(self): + """Return the context identifier the local objects use internally for + this context. You cannot override this method to change the behavior + but use it to link other context local objects (such as SQLAlchemy's + scoped sessions) to the Werkzeug locals. + + .. versionchanged:: 0.7 + You can pass a different ident function to the local manager that + will then be propagated to all the locals passed to the + constructor. + """ + return self.ident_func() + + def cleanup(self): + """Manually clean up the data in the locals for this context. Call + this at the end of the request or use `make_middleware()`. + """ + for local in self.locals: + release_local(local) + + def make_middleware(self, app): + """Wrap a WSGI application so that cleaning up happens after + request end. + """ + def application(environ, start_response): + return ClosingIterator(app(environ, start_response), self.cleanup) + return application + + def middleware(self, func): + """Like `make_middleware` but for decorating functions. + + Example usage:: + + @manager.middleware + def application(environ, start_response): + ... + + The difference to `make_middleware` is that the function passed + will have all the arguments copied from the inner application + (name, docstring, module). + """ + return update_wrapper(self.make_middleware(func), func) + + def __repr__(self): + return '<%s storages: %d>' % ( + self.__class__.__name__, + len(self.locals) + ) + + +@implements_bool +class LocalProxy(object): + + """Acts as a proxy for a werkzeug local. Forwards all operations to + a proxied object. 
The only operations not supported for forwarding + are right handed operands and any kind of assignment. + + Example usage:: + + from werkzeug.local import Local + l = Local() + + # these are proxies + request = l('request') + user = l('user') + + + from werkzeug.local import LocalStack + _response_local = LocalStack() + + # this is a proxy + response = _response_local() + + Whenever something is bound to l.user / l.request the proxy objects + will forward all operations. If no object is bound a :exc:`RuntimeError` + will be raised. + + To create proxies to :class:`Local` or :class:`LocalStack` objects, + call the object as shown above. If you want to have a proxy to an + object looked up by a function, you can (as of Werkzeug 0.6.1) pass + a function to the :class:`LocalProxy` constructor:: + + session = LocalProxy(lambda: get_current_request().session) + + .. versionchanged:: 0.6.1 + The class can be instanciated with a callable as well now. + """ + __slots__ = ('__local', '__dict__', '__name__') + + def __init__(self, local, name=None): + object.__setattr__(self, '_LocalProxy__local', local) + object.__setattr__(self, '__name__', name) + + def _get_current_object(self): + """Return the current object. This is useful if you want the real + object behind the proxy at a time for performance reasons or because + you want to pass the object into a different context. 
+ """ + if not hasattr(self.__local, '__release_local__'): + return self.__local() + try: + return getattr(self.__local, self.__name__) + except AttributeError: + raise RuntimeError('no object bound to %s' % self.__name__) + + @property + def __dict__(self): + try: + return self._get_current_object().__dict__ + except RuntimeError: + raise AttributeError('__dict__') + + def __repr__(self): + try: + obj = self._get_current_object() + except RuntimeError: + return '<%s unbound>' % self.__class__.__name__ + return repr(obj) + + def __bool__(self): + try: + return bool(self._get_current_object()) + except RuntimeError: + return False + + def __unicode__(self): + try: + return unicode(self._get_current_object()) # noqa + except RuntimeError: + return repr(self) + + def __dir__(self): + try: + return dir(self._get_current_object()) + except RuntimeError: + return [] + + def __getattr__(self, name): + if name == '__members__': + return dir(self._get_current_object()) + return getattr(self._get_current_object(), name) + + def __setitem__(self, key, value): + self._get_current_object()[key] = value + + def __delitem__(self, key): + del self._get_current_object()[key] + + if PY2: + __getslice__ = lambda x, i, j: x._get_current_object()[i:j] + + def __setslice__(self, i, j, seq): + self._get_current_object()[i:j] = seq + + def __delslice__(self, i, j): + del self._get_current_object()[i:j] + + __setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v) + __delattr__ = lambda x, n: delattr(x._get_current_object(), n) + __str__ = lambda x: str(x._get_current_object()) + __lt__ = lambda x, o: x._get_current_object() < o + __le__ = lambda x, o: x._get_current_object() <= o + __eq__ = lambda x, o: x._get_current_object() == o + __ne__ = lambda x, o: x._get_current_object() != o + __gt__ = lambda x, o: x._get_current_object() > o + __ge__ = lambda x, o: x._get_current_object() >= o + __cmp__ = lambda x, o: cmp(x._get_current_object(), o) # noqa + __hash__ = lambda x: 
hash(x._get_current_object()) + __call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw) + __len__ = lambda x: len(x._get_current_object()) + __getitem__ = lambda x, i: x._get_current_object()[i] + __iter__ = lambda x: iter(x._get_current_object()) + __contains__ = lambda x, i: i in x._get_current_object() + __add__ = lambda x, o: x._get_current_object() + o + __sub__ = lambda x, o: x._get_current_object() - o + __mul__ = lambda x, o: x._get_current_object() * o + __floordiv__ = lambda x, o: x._get_current_object() // o + __mod__ = lambda x, o: x._get_current_object() % o + __divmod__ = lambda x, o: x._get_current_object().__divmod__(o) + __pow__ = lambda x, o: x._get_current_object() ** o + __lshift__ = lambda x, o: x._get_current_object() << o + __rshift__ = lambda x, o: x._get_current_object() >> o + __and__ = lambda x, o: x._get_current_object() & o + __xor__ = lambda x, o: x._get_current_object() ^ o + __or__ = lambda x, o: x._get_current_object() | o + __div__ = lambda x, o: x._get_current_object().__div__(o) + __truediv__ = lambda x, o: x._get_current_object().__truediv__(o) + __neg__ = lambda x: -(x._get_current_object()) + __pos__ = lambda x: +(x._get_current_object()) + __abs__ = lambda x: abs(x._get_current_object()) + __invert__ = lambda x: ~(x._get_current_object()) + __complex__ = lambda x: complex(x._get_current_object()) + __int__ = lambda x: int(x._get_current_object()) + __long__ = lambda x: long(x._get_current_object()) # noqa + __float__ = lambda x: float(x._get_current_object()) + __oct__ = lambda x: oct(x._get_current_object()) + __hex__ = lambda x: hex(x._get_current_object()) + __index__ = lambda x: x._get_current_object().__index__() + __coerce__ = lambda x, o: x._get_current_object().__coerce__(x, o) + __enter__ = lambda x: x._get_current_object().__enter__() + __exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw) + __radd__ = lambda x, o: o + x._get_current_object() + __rsub__ = lambda x, o: o - 
x._get_current_object() + __rmul__ = lambda x, o: o * x._get_current_object() + __rdiv__ = lambda x, o: o / x._get_current_object() + if PY2: + __rtruediv__ = lambda x, o: x._get_current_object().__rtruediv__(o) + else: + __rtruediv__ = __rdiv__ + __rfloordiv__ = lambda x, o: o // x._get_current_object() + __rmod__ = lambda x, o: o % x._get_current_object() + __rdivmod__ = lambda x, o: x._get_current_object().__rdivmod__(o) + __copy__ = lambda x: copy.copy(x._get_current_object()) + __deepcopy__ = lambda x, memo: copy.deepcopy(x._get_current_object(), memo) diff --git a/deps/werkzeug/posixemulation.py b/deps/werkzeug/posixemulation.py new file mode 100644 index 00000000..8fd6314f --- /dev/null +++ b/deps/werkzeug/posixemulation.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +r""" + werkzeug.posixemulation + ~~~~~~~~~~~~~~~~~~~~~~~ + + Provides a POSIX emulation for some features that are relevant to + web applications. The main purpose is to simplify support for + systems such as Windows NT that are not 100% POSIX compatible. + + Currently this only implements a :func:`rename` function that + follows POSIX semantics. Eg: if the target file already exists it + will be replaced without asking. + + This module was introduced in 0.6.1 and is not a public interface. + It might become one in later versions of Werkzeug. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. 
+""" +import sys +import os +import errno +import time +import random + +from ._compat import to_unicode +from .filesystem import get_filesystem_encoding + + +can_rename_open_file = False +if os.name == 'nt': # pragma: no cover + _rename = lambda src, dst: False + _rename_atomic = lambda src, dst: False + + try: + import ctypes + + _MOVEFILE_REPLACE_EXISTING = 0x1 + _MOVEFILE_WRITE_THROUGH = 0x8 + _MoveFileEx = ctypes.windll.kernel32.MoveFileExW + + def _rename(src, dst): + src = to_unicode(src, get_filesystem_encoding()) + dst = to_unicode(dst, get_filesystem_encoding()) + if _rename_atomic(src, dst): + return True + retry = 0 + rv = False + while not rv and retry < 100: + rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING | + _MOVEFILE_WRITE_THROUGH) + if not rv: + time.sleep(0.001) + retry += 1 + return rv + + # new in Vista and Windows Server 2008 + _CreateTransaction = ctypes.windll.ktmw32.CreateTransaction + _CommitTransaction = ctypes.windll.ktmw32.CommitTransaction + _MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW + _CloseHandle = ctypes.windll.kernel32.CloseHandle + can_rename_open_file = True + + def _rename_atomic(src, dst): + ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Werkzeug rename') + if ta == -1: + return False + try: + retry = 0 + rv = False + while not rv and retry < 100: + rv = _MoveFileTransacted(src, dst, None, None, + _MOVEFILE_REPLACE_EXISTING | + _MOVEFILE_WRITE_THROUGH, ta) + if rv: + rv = _CommitTransaction(ta) + break + else: + time.sleep(0.001) + retry += 1 + return rv + finally: + _CloseHandle(ta) + except Exception: + pass + + def rename(src, dst): + # Try atomic or pseudo-atomic rename + if _rename(src, dst): + return + # Fall back to "move away and replace" + try: + os.rename(src, dst) + except OSError as e: + if e.errno != errno.EEXIST: + raise + old = "%s-%08x" % (dst, random.randint(0, sys.maxint)) + os.rename(dst, old) + os.rename(src, dst) + try: + os.unlink(old) + except Exception: + pass +else: + 
rename = os.rename + can_rename_open_file = True diff --git a/deps/werkzeug/routing.py b/deps/werkzeug/routing.py new file mode 100644 index 00000000..7c58c550 --- /dev/null +++ b/deps/werkzeug/routing.py @@ -0,0 +1,1774 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.routing + ~~~~~~~~~~~~~~~~ + + When it comes to combining multiple controller or view functions (however + you want to call them) you need a dispatcher. A simple way would be + applying regular expression tests on the ``PATH_INFO`` and calling + registered callback functions that return the value then. + + This module implements a much more powerful system than simple regular + expression matching because it can also convert values in the URLs and + build URLs. + + Here a simple example that creates an URL map for an application with + two subdomains (www and kb) and some URL rules: + + >>> m = Map([ + ... # Static URLs + ... Rule('/', endpoint='static/index'), + ... Rule('/about', endpoint='static/about'), + ... Rule('/help', endpoint='static/help'), + ... # Knowledge Base + ... Subdomain('kb', [ + ... Rule('/', endpoint='kb/index'), + ... Rule('/browse/', endpoint='kb/browse'), + ... Rule('/browse//', endpoint='kb/browse'), + ... Rule('/browse//', endpoint='kb/browse') + ... ]) + ... ], default_subdomain='www') + + If the application doesn't use subdomains it's perfectly fine to not set + the default subdomain and not use the `Subdomain` rule factory. The endpoint + in the rules can be anything, for example import paths or unique + identifiers. The WSGI application can use those endpoints to get the + handler for that URL. It doesn't have to be a string at all but it's + recommended. 
+ + Now it's possible to create a URL adapter for one of the subdomains and + build URLs: + + >>> c = m.bind('example.com') + >>> c.build("kb/browse", dict(id=42)) + 'http://kb.example.com/browse/42/' + >>> c.build("kb/browse", dict()) + 'http://kb.example.com/browse/' + >>> c.build("kb/browse", dict(id=42, page=3)) + 'http://kb.example.com/browse/42/3' + >>> c.build("static/about") + '/about' + >>> c.build("static/index", force_external=True) + 'http://www.example.com/' + + >>> c = m.bind('example.com', subdomain='kb') + >>> c.build("static/about") + 'http://www.example.com/about' + + The first argument to bind is the server name *without* the subdomain. + Per default it will assume that the script is mounted on the root, but + often that's not the case so you can provide the real mount point as + second argument: + + >>> c = m.bind('example.com', '/applications/example') + + The third argument can be the subdomain, if not given the default + subdomain is used. For more details about binding have a look at the + documentation of the `MapAdapter`. + + And here is how you can match URLs: + + >>> c = m.bind('example.com') + >>> c.match("/") + ('static/index', {}) + >>> c.match("/about") + ('static/about', {}) + >>> c = m.bind('example.com', '/', 'kb') + >>> c.match("/") + ('kb/index', {}) + >>> c.match("/browse/42/23") + ('kb/browse', {'id': 42, 'page': 23}) + + If matching fails you get a `NotFound` exception, if the rule thinks + it's a good idea to redirect (for example because the URL was defined + to have a slash at the end but the request was missing that slash) it + will raise a `RequestRedirect` exception. Both are subclasses of the + `HTTPException` so you can use those errors as responses in the + application. + + If matching succeeded but the URL rule was incompatible to the given + method (for example there were only rules for `GET` and `HEAD` and + routing system tried to match a `POST` request) a `MethodNotAllowed` + method is raised. 
+ + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import difflib +import re +import uuid +import posixpath + +from pprint import pformat +from threading import Lock + +from werkzeug.urls import url_encode, url_quote, url_join +from werkzeug.utils import redirect, format_string +from werkzeug.exceptions import HTTPException, NotFound, MethodNotAllowed, \ + BadHost +from werkzeug._internal import _get_environ, _encode_idna +from werkzeug._compat import itervalues, iteritems, to_unicode, to_bytes, \ + text_type, string_types, native_string_result, \ + implements_to_string, wsgi_decoding_dance +from werkzeug.datastructures import ImmutableDict, MultiDict + + +_rule_re = re.compile(r''' + (?P[^<]*) # static rule data + < + (?: + (?P[a-zA-Z_][a-zA-Z0-9_]*) # converter name + (?:\((?P.*?)\))? # converter arguments + \: # variable delimiter + )? + (?P[a-zA-Z_][a-zA-Z0-9_]*) # variable name + > +''', re.VERBOSE) +_simple_rule_re = re.compile(r'<([^>]+)>') +_converter_args_re = re.compile(r''' + ((?P\w+)\s*=\s*)? 
+ (?P + True|False| + \d+.\d+| + \d+.| + \d+| + \w+| + [urUR]?(?P"[^"]*?"|'[^']*') + )\s*, +''', re.VERBOSE | re.UNICODE) + + +_PYTHON_CONSTANTS = { + 'None': None, + 'True': True, + 'False': False +} + + +def _pythonize(value): + if value in _PYTHON_CONSTANTS: + return _PYTHON_CONSTANTS[value] + for convert in int, float: + try: + return convert(value) + except ValueError: + pass + if value[:1] == value[-1:] and value[0] in '"\'': + value = value[1:-1] + return text_type(value) + + +def parse_converter_args(argstr): + argstr += ',' + args = [] + kwargs = {} + + for item in _converter_args_re.finditer(argstr): + value = item.group('stringval') + if value is None: + value = item.group('value') + value = _pythonize(value) + if not item.group('name'): + args.append(value) + else: + name = item.group('name') + kwargs[name] = value + + return tuple(args), kwargs + + +def parse_rule(rule): + """Parse a rule and return it as generator. Each iteration yields tuples + in the form ``(converter, arguments, variable)``. If the converter is + `None` it's a static url part, otherwise it's a dynamic one. + + :internal: + """ + pos = 0 + end = len(rule) + do_match = _rule_re.match + used_names = set() + while pos < end: + m = do_match(rule, pos) + if m is None: + break + data = m.groupdict() + if data['static']: + yield None, None, data['static'] + variable = data['variable'] + converter = data['converter'] or 'default' + if variable in used_names: + raise ValueError('variable name %r used twice.' % variable) + used_names.add(variable) + yield converter, data['args'] or None, variable + pos = m.end() + if pos < end: + remaining = rule[pos:] + if '>' in remaining or '<' in remaining: + raise ValueError('malformed url rule: %r' % rule) + yield None, None, remaining + + +class RoutingException(Exception): + + """Special exceptions that require the application to redirect, notifying + about missing urls, etc. 
+ + :internal: + """ + + +class RequestRedirect(HTTPException, RoutingException): + + """Raise if the map requests a redirect. This is for example the case if + `strict_slashes` are activated and an url that requires a trailing slash. + + The attribute `new_url` contains the absolute destination url. + """ + code = 301 + + def __init__(self, new_url): + RoutingException.__init__(self, new_url) + self.new_url = new_url + + def get_response(self, environ): + return redirect(self.new_url, self.code) + + +class RequestSlash(RoutingException): + + """Internal exception.""" + + +class RequestAliasRedirect(RoutingException): + + """This rule is an alias and wants to redirect to the canonical URL.""" + + def __init__(self, matched_values): + self.matched_values = matched_values + + +class BuildError(RoutingException, LookupError): + + """Raised if the build system cannot find a URL for an endpoint with the + values provided. + """ + + def __init__(self, endpoint, values, method, adapter=None): + LookupError.__init__(self, endpoint, values, method) + self.endpoint = endpoint + self.values = values + self.method = method + self.suggested = self.closest_rule(adapter) + + def closest_rule(self, adapter): + def score_rule(rule): + return sum([ + 0.98 * difflib.SequenceMatcher( + None, rule.endpoint, self.endpoint + ).ratio(), + 0.01 * bool(set(self.values or ()).issubset(rule.arguments)), + 0.01 * bool(rule.methods and self.method in rule.methods) + ]) + + if adapter and adapter.map._rules: + return max(adapter.map._rules, key=score_rule) + else: + return None + + def __str__(self): + message = [] + message.append("Could not build url for endpoint %r" % self.endpoint) + if self.method: + message.append(" (%r)" % self.method) + if self.values: + message.append(" with values %r" % sorted(self.values.keys())) + message.append(".") + if self.suggested: + if self.endpoint == self.suggested.endpoint: + if self.method and self.method not in self.suggested.methods: + message.append(" 
Did you mean to use methods %r?" % sorted( + self.suggested.methods + )) + missing_values = self.suggested.arguments.union( + set(self.suggested.defaults or ()) + ) - set(self.values.keys()) + if missing_values: + message.append( + " Did you forget to specify values %r?" % + sorted(missing_values) + ) + else: + message.append( + " Did you mean %r instead?" % self.suggested.endpoint + ) + return "".join(message) + + +class ValidationError(ValueError): + + """Validation error. If a rule converter raises this exception the rule + does not match the current URL and the next URL is tried. + """ + + +class RuleFactory(object): + + """As soon as you have more complex URL setups it's a good idea to use rule + factories to avoid repetitive tasks. Some of them are builtin, others can + be added by subclassing `RuleFactory` and overriding `get_rules`. + """ + + def get_rules(self, map): + """Subclasses of `RuleFactory` have to override this method and return + an iterable of rules.""" + raise NotImplementedError() + + +class Subdomain(RuleFactory): + + """All URLs provided by this factory have the subdomain set to a + specific domain. For example if you want to use the subdomain for + the current language this can be a good setup:: + + url_map = Map([ + Rule('/', endpoint='#select_language'), + Subdomain('', [ + Rule('/', endpoint='index'), + Rule('/about', endpoint='about'), + Rule('/help', endpoint='help') + ]) + ]) + + All the rules except for the ``'#select_language'`` endpoint will now + listen on a two letter long subdomain that holds the language code + for the current request. 
+ """ + + def __init__(self, subdomain, rules): + self.subdomain = subdomain + self.rules = rules + + def get_rules(self, map): + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.subdomain = self.subdomain + yield rule + + +class Submount(RuleFactory): + + """Like `Subdomain` but prefixes the URL rule with a given string:: + + url_map = Map([ + Rule('/', endpoint='index'), + Submount('/blog', [ + Rule('/', endpoint='blog/index'), + Rule('/entry/', endpoint='blog/show') + ]) + ]) + + Now the rule ``'blog/show'`` matches ``/blog/entry/``. + """ + + def __init__(self, path, rules): + self.path = path.rstrip('/') + self.rules = rules + + def get_rules(self, map): + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.rule = self.path + rule.rule + yield rule + + +class EndpointPrefix(RuleFactory): + + """Prefixes all endpoints (which must be strings for this factory) with + another string. This can be useful for sub applications:: + + url_map = Map([ + Rule('/', endpoint='index'), + EndpointPrefix('blog/', [Submount('/blog', [ + Rule('/', endpoint='index'), + Rule('/entry/', endpoint='show') + ])]) + ]) + """ + + def __init__(self, prefix, rules): + self.prefix = prefix + self.rules = rules + + def get_rules(self, map): + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.endpoint = self.prefix + rule.endpoint + yield rule + + +class RuleTemplate(object): + + """Returns copies of the rules wrapped and expands string templates in + the endpoint, rule, defaults or subdomain sections. 
+ + Here a small example for such a rule template:: + + from werkzeug.routing import Map, Rule, RuleTemplate + + resource = RuleTemplate([ + Rule('/$name/', endpoint='$name.list'), + Rule('/$name/', endpoint='$name.show') + ]) + + url_map = Map([resource(name='user'), resource(name='page')]) + + When a rule template is called the keyword arguments are used to + replace the placeholders in all the string parameters. + """ + + def __init__(self, rules): + self.rules = list(rules) + + def __call__(self, *args, **kwargs): + return RuleTemplateFactory(self.rules, dict(*args, **kwargs)) + + +class RuleTemplateFactory(RuleFactory): + + """A factory that fills in template variables into rules. Used by + `RuleTemplate` internally. + + :internal: + """ + + def __init__(self, rules, context): + self.rules = rules + self.context = context + + def get_rules(self, map): + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + new_defaults = subdomain = None + if rule.defaults: + new_defaults = {} + for key, value in iteritems(rule.defaults): + if isinstance(value, string_types): + value = format_string(value, self.context) + new_defaults[key] = value + if rule.subdomain is not None: + subdomain = format_string(rule.subdomain, self.context) + new_endpoint = rule.endpoint + if isinstance(new_endpoint, string_types): + new_endpoint = format_string(new_endpoint, self.context) + yield Rule( + format_string(rule.rule, self.context), + new_defaults, + subdomain, + rule.methods, + rule.build_only, + new_endpoint, + rule.strict_slashes + ) + + +@implements_to_string +class Rule(RuleFactory): + + """A Rule represents one URL pattern. There are some options for `Rule` + that change the way it behaves and are passed to the `Rule` constructor. + Note that besides the rule-string all arguments *must* be keyword arguments + in order to not break the application on Werkzeug upgrades. 
+ + `string` + Rule strings basically are just normal URL paths with placeholders in + the format ```` where the converter and the + arguments are optional. If no converter is defined the `default` + converter is used which means `string` in the normal configuration. + + URL rules that end with a slash are branch URLs, others are leaves. + If you have `strict_slashes` enabled (which is the default), all + branch URLs that are matched without a trailing slash will trigger a + redirect to the same URL with the missing slash appended. + + The converters are defined on the `Map`. + + `endpoint` + The endpoint for this rule. This can be anything. A reference to a + function, a string, a number etc. The preferred way is using a string + because the endpoint is used for URL generation. + + `defaults` + An optional dict with defaults for other rules with the same endpoint. + This is a bit tricky but useful if you want to have unique URLs:: + + url_map = Map([ + Rule('/all/', defaults={'page': 1}, endpoint='all_entries'), + Rule('/all/page/', endpoint='all_entries') + ]) + + If a user now visits ``http://example.com/all/page/1`` he will be + redirected to ``http://example.com/all/``. If `redirect_defaults` is + disabled on the `Map` instance this will only affect the URL + generation. + + `subdomain` + The subdomain rule string for this rule. If not specified the rule + only matches for the `default_subdomain` of the map. If the map is + not bound to a subdomain this feature is disabled. + + Can be useful if you want to have user profiles on different subdomains + and all subdomains are forwarded to your application:: + + url_map = Map([ + Rule('/', subdomain='', endpoint='user/homepage'), + Rule('/stats', subdomain='', endpoint='user/stats') + ]) + + `methods` + A sequence of http methods this rule applies to. If not specified, all + methods are allowed. For example this can be useful if you want different + endpoints for `POST` and `GET`. 
If methods are defined and the path + matches but the method matched against is not in this list or in the + list of another rule for that path the error raised is of the type + `MethodNotAllowed` rather than `NotFound`. If `GET` is present in the + list of methods and `HEAD` is not, `HEAD` is added automatically. + + .. versionchanged:: 0.6.1 + `HEAD` is now automatically added to the methods if `GET` is + present. The reason for this is that existing code often did not + work properly in servers not rewriting `HEAD` to `GET` + automatically and it was not documented how `HEAD` should be + treated. This was considered a bug in Werkzeug because of that. + + `strict_slashes` + Override the `Map` setting for `strict_slashes` only for this rule. If + not specified the `Map` setting is used. + + `build_only` + Set this to True and the rule will never match but will create a URL + that can be build. This is useful if you have resources on a subdomain + or folder that are not handled by the WSGI application (like static data) + + `redirect_to` + If given this must be either a string or callable. In case of a + callable it's called with the url adapter that triggered the match and + the values of the URL as keyword arguments and has to return the target + for the redirect, otherwise it has to be a string with placeholders in + rule syntax:: + + def foo_with_slug(adapter, id): + # ask the database for the slug for the old id. this of + # course has nothing to do with werkzeug. + return 'foo/' + Foo.get_slug_for_id(id) + + url_map = Map([ + Rule('/foo/', endpoint='foo'), + Rule('/some/old/url/', redirect_to='foo/'), + Rule('/other/old/url/', redirect_to=foo_with_slug) + ]) + + When the rule is matched the routing system will raise a + `RequestRedirect` exception with the target for the redirect. + + Keep in mind that the URL will be joined against the URL root of the + script so don't use a leading slash on the target URL unless you + really mean root of that domain. 
+ + `alias` + If enabled this rule serves as an alias for another rule with the same + endpoint and arguments. + + `host` + If provided and the URL map has host matching enabled this can be + used to provide a match rule for the whole host. This also means + that the subdomain feature is disabled. + + .. versionadded:: 0.7 + The `alias` and `host` parameters were added. + """ + + def __init__(self, string, defaults=None, subdomain=None, methods=None, + build_only=False, endpoint=None, strict_slashes=None, + redirect_to=None, alias=False, host=None): + if not string.startswith('/'): + raise ValueError('urls must start with a leading slash') + self.rule = string + self.is_leaf = not string.endswith('/') + + self.map = None + self.strict_slashes = strict_slashes + self.subdomain = subdomain + self.host = host + self.defaults = defaults + self.build_only = build_only + self.alias = alias + if methods is None: + self.methods = None + else: + self.methods = set([x.upper() for x in methods]) + if 'HEAD' not in self.methods and 'GET' in self.methods: + self.methods.add('HEAD') + self.endpoint = endpoint + self.redirect_to = redirect_to + + if defaults: + self.arguments = set(map(str, defaults)) + else: + self.arguments = set() + self._trace = self._converters = self._regex = self._weights = None + + def empty(self): + """ + Return an unbound copy of this rule. + + This can be useful if want to reuse an already bound URL for another + map. See ``get_empty_kwargs`` to override what keyword arguments are + provided to the new copy. + """ + return type(self)(self.rule, **self.get_empty_kwargs()) + + def get_empty_kwargs(self): + """ + Provides kwargs for instantiating empty copy with empty() + + Use this method to provide custom keyword arguments to the subclass of + ``Rule`` when calling ``some_rule.empty()``. Helpful when the subclass + has custom keyword arguments that are needed at instantiation. 
+ + Must return a ``dict`` that will be provided as kwargs to the new + instance of ``Rule``, following the initial ``self.rule`` value which + is always provided as the first, required positional argument. + """ + defaults = None + if self.defaults: + defaults = dict(self.defaults) + return dict(defaults=defaults, subdomain=self.subdomain, + methods=self.methods, build_only=self.build_only, + endpoint=self.endpoint, strict_slashes=self.strict_slashes, + redirect_to=self.redirect_to, alias=self.alias, + host=self.host) + + def get_rules(self, map): + yield self + + def refresh(self): + """Rebinds and refreshes the URL. Call this if you modified the + rule in place. + + :internal: + """ + self.bind(self.map, rebind=True) + + def bind(self, map, rebind=False): + """Bind the url to a map and create a regular expression based on + the information from the rule itself and the defaults from the map. + + :internal: + """ + if self.map is not None and not rebind: + raise RuntimeError('url rule %r already bound to map %r' % + (self, self.map)) + self.map = map + if self.strict_slashes is None: + self.strict_slashes = map.strict_slashes + if self.subdomain is None: + self.subdomain = map.default_subdomain + self.compile() + + def get_converter(self, variable_name, converter_name, args, kwargs): + """Looks up the converter for the given parameter. + + .. 
versionadded:: 0.9 + """ + if converter_name not in self.map.converters: + raise LookupError('the converter %r does not exist' % converter_name) + return self.map.converters[converter_name](self.map, *args, **kwargs) + + def compile(self): + """Compiles the regular expression and stores it.""" + assert self.map is not None, 'rule not bound' + + if self.map.host_matching: + domain_rule = self.host or '' + else: + domain_rule = self.subdomain or '' + + self._trace = [] + self._converters = {} + self._weights = [] + regex_parts = [] + + def _build_regex(rule): + for converter, arguments, variable in parse_rule(rule): + if converter is None: + regex_parts.append(re.escape(variable)) + self._trace.append((False, variable)) + for part in variable.split('/'): + if part: + self._weights.append((0, -len(part))) + else: + if arguments: + c_args, c_kwargs = parse_converter_args(arguments) + else: + c_args = () + c_kwargs = {} + convobj = self.get_converter( + variable, converter, c_args, c_kwargs) + regex_parts.append('(?P<%s>%s)' % (variable, convobj.regex)) + self._converters[variable] = convobj + self._trace.append((True, variable)) + self._weights.append((1, convobj.weight)) + self.arguments.add(str(variable)) + + _build_regex(domain_rule) + regex_parts.append('\\|') + self._trace.append((False, '|')) + _build_regex(self.is_leaf and self.rule or self.rule.rstrip('/')) + if not self.is_leaf: + self._trace.append((False, '/')) + + if self.build_only: + return + regex = r'^%s%s$' % ( + u''.join(regex_parts), + (not self.is_leaf or not self.strict_slashes) and + '(?/?)' or '' + ) + self._regex = re.compile(regex, re.UNICODE) + + def match(self, path): + """Check if the rule matches a given path. Path is a string in the + form ``"subdomain|/path(method)"`` and is assembled by the map. If + the map is doing host matching the subdomain part will be the host + instead. + + If the rule matches a dict with the converted values is returned, + otherwise the return value is `None`. 
+ + :internal: + """ + if not self.build_only: + m = self._regex.search(path) + if m is not None: + groups = m.groupdict() + # we have a folder like part of the url without a trailing + # slash and strict slashes enabled. raise an exception that + # tells the map to redirect to the same url but with a + # trailing slash + if self.strict_slashes and not self.is_leaf and \ + not groups.pop('__suffix__'): + raise RequestSlash() + # if we are not in strict slashes mode we have to remove + # a __suffix__ + elif not self.strict_slashes: + del groups['__suffix__'] + + result = {} + for name, value in iteritems(groups): + try: + value = self._converters[name].to_python(value) + except ValidationError: + return + result[str(name)] = value + if self.defaults: + result.update(self.defaults) + + if self.alias and self.map.redirect_defaults: + raise RequestAliasRedirect(result) + + return result + + def build(self, values, append_unknown=True): + """Assembles the relative url for that rule and the subdomain. + If building doesn't work for some reasons `None` is returned. + + :internal: + """ + tmp = [] + add = tmp.append + processed = set(self.arguments) + for is_dynamic, data in self._trace: + if is_dynamic: + try: + add(self._converters[data].to_url(values[data])) + except ValidationError: + return + processed.add(data) + else: + add(url_quote(to_bytes(data, self.map.charset), safe='/:|+')) + domain_part, url = (u''.join(tmp)).split(u'|', 1) + + if append_unknown: + query_vars = MultiDict(values) + for key in processed: + if key in query_vars: + del query_vars[key] + + if query_vars: + url += u'?' + url_encode(query_vars, charset=self.map.charset, + sort=self.map.sort_parameters, + key=self.map.sort_key) + + return domain_part, url + + def provides_defaults_for(self, rule): + """Check if this rule has defaults for a given rule. 
+ + :internal: + """ + return not self.build_only and self.defaults and \ + self.endpoint == rule.endpoint and self != rule and \ + self.arguments == rule.arguments + + def suitable_for(self, values, method=None): + """Check if the dict of values has enough data for url generation. + + :internal: + """ + # if a method was given explicitly and that method is not supported + # by this rule, this rule is not suitable. + if method is not None and self.methods is not None \ + and method not in self.methods: + return False + + defaults = self.defaults or () + + # all arguments required must be either in the defaults dict or + # the value dictionary otherwise it's not suitable + for key in self.arguments: + if key not in defaults and key not in values: + return False + + # in case defaults are given we ensure taht either the value was + # skipped or the value is the same as the default value. + if defaults: + for key, value in iteritems(defaults): + if key in values and value != values[key]: + return False + + return True + + def match_compare_key(self): + """The match compare key for sorting. + + Current implementation: + + 1. rules without any arguments come first for performance + reasons only as we expect them to match faster and some + common ones usually don't have any arguments (index pages etc.) + 2. The more complex rules come first so the second argument is the + negative length of the number of weights. + 3. lastly we order by the actual weights. + + :internal: + """ + return bool(self.arguments), -len(self._weights), self._weights + + def build_compare_key(self): + """The build compare key for sorting. 
+ + :internal: + """ + return self.alias and 1 or 0, -len(self.arguments), \ + -len(self.defaults or ()) + + def __eq__(self, other): + return self.__class__ is other.__class__ and \ + self._trace == other._trace + + def __ne__(self, other): + return not self.__eq__(other) + + def __str__(self): + return self.rule + + @native_string_result + def __repr__(self): + if self.map is None: + return u'<%s (unbound)>' % self.__class__.__name__ + tmp = [] + for is_dynamic, data in self._trace: + if is_dynamic: + tmp.append(u'<%s>' % data) + else: + tmp.append(data) + return u'<%s %s%s -> %s>' % ( + self.__class__.__name__, + repr((u''.join(tmp)).lstrip(u'|')).lstrip(u'u'), + self.methods is not None + and u' (%s)' % u', '.join(self.methods) + or u'', + self.endpoint + ) + + +class BaseConverter(object): + + """Base class for all converters.""" + regex = '[^/]+' + weight = 100 + + def __init__(self, map): + self.map = map + + def to_python(self, value): + return value + + def to_url(self, value): + return url_quote(value, charset=self.map.charset) + + +class UnicodeConverter(BaseConverter): + + """This converter is the default converter and accepts any string but + only one path segment. Thus the string can not include a slash. + + This is the default validator. + + Example:: + + Rule('/pages/'), + Rule('/') + + :param map: the :class:`Map`. + :param minlength: the minimum length of the string. Must be greater + or equal 1. + :param maxlength: the maximum length of the string. + :param length: the exact length of the string. + """ + + def __init__(self, map, minlength=1, maxlength=None, length=None): + BaseConverter.__init__(self, map) + if length is not None: + length = '{%d}' % int(length) + else: + if maxlength is None: + maxlength = '' + else: + maxlength = int(maxlength) + length = '{%s,%s}' % ( + int(minlength), + maxlength + ) + self.regex = '[^/]' + length + + +class AnyConverter(BaseConverter): + + """Matches one of the items provided. 
Items can either be Python + identifiers or strings:: + + Rule('/') + + :param map: the :class:`Map`. + :param items: this function accepts the possible items as positional + arguments. + """ + + def __init__(self, map, *items): + BaseConverter.__init__(self, map) + self.regex = '(?:%s)' % '|'.join([re.escape(x) for x in items]) + + +class PathConverter(BaseConverter): + + """Like the default :class:`UnicodeConverter`, but it also matches + slashes. This is useful for wikis and similar applications:: + + Rule('/') + Rule('//edit') + + :param map: the :class:`Map`. + """ + regex = '[^/].*?' + weight = 200 + + +class NumberConverter(BaseConverter): + + """Baseclass for `IntegerConverter` and `FloatConverter`. + + :internal: + """ + weight = 50 + + def __init__(self, map, fixed_digits=0, min=None, max=None): + BaseConverter.__init__(self, map) + self.fixed_digits = fixed_digits + self.min = min + self.max = max + + def to_python(self, value): + if (self.fixed_digits and len(value) != self.fixed_digits): + raise ValidationError() + value = self.num_convert(value) + if (self.min is not None and value < self.min) or \ + (self.max is not None and value > self.max): + raise ValidationError() + return value + + def to_url(self, value): + value = self.num_convert(value) + if self.fixed_digits: + value = ('%%0%sd' % self.fixed_digits) % value + return str(value) + + +class IntegerConverter(NumberConverter): + + """This converter only accepts integer values:: + + Rule('/page/') + + This converter does not support negative values. + + :param map: the :class:`Map`. + :param fixed_digits: the number of fixed digits in the URL. If you set + this to ``4`` for example, the application will + only match if the url looks like ``/0001/``. The + default is variable length. + :param min: the minimal value. + :param max: the maximal value. 
+ """ + regex = r'\d+' + num_convert = int + + +class FloatConverter(NumberConverter): + + """This converter only accepts floating point values:: + + Rule('/probability/') + + This converter does not support negative values. + + :param map: the :class:`Map`. + :param min: the minimal value. + :param max: the maximal value. + """ + regex = r'\d+\.\d+' + num_convert = float + + def __init__(self, map, min=None, max=None): + NumberConverter.__init__(self, map, 0, min, max) + + +class UUIDConverter(BaseConverter): + + """This converter only accepts UUID strings:: + + Rule('/object/') + + .. versionadded:: 0.10 + + :param map: the :class:`Map`. + """ + regex = r'[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-' \ + r'[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}' + + def to_python(self, value): + return uuid.UUID(value) + + def to_url(self, value): + return str(value) + + +#: the default converter mapping for the map. +DEFAULT_CONVERTERS = { + 'default': UnicodeConverter, + 'string': UnicodeConverter, + 'any': AnyConverter, + 'path': PathConverter, + 'int': IntegerConverter, + 'float': FloatConverter, + 'uuid': UUIDConverter, +} + + +class Map(object): + + """The map class stores all the URL rules and some configuration + parameters. Some of the configuration values are only stored on the + `Map` instance since those affect all rules, others are just defaults + and can be overridden for each rule. Note that you have to specify all + arguments besides the `rules` as keyword arguments! + + :param rules: sequence of url rules for this map. + :param default_subdomain: The default subdomain for rules without a + subdomain defined. + :param charset: charset of the url. defaults to ``"utf-8"`` + :param strict_slashes: Take care of trailing slashes. + :param redirect_defaults: This will redirect to the default rule if it + wasn't visited that way. This helps creating + unique URLs. + :param converters: A dict of converters that adds additional converters + to the list of converters. 
If you redefine one + converter this will override the original one. + :param sort_parameters: If set to `True` the url parameters are sorted. + See `url_encode` for more details. + :param sort_key: The sort key function for `url_encode`. + :param encoding_errors: the error method to use for decoding + :param host_matching: if set to `True` it enables the host matching + feature and disables the subdomain one. If + enabled the `host` parameter to rules is used + instead of the `subdomain` one. + + .. versionadded:: 0.5 + `sort_parameters` and `sort_key` was added. + + .. versionadded:: 0.7 + `encoding_errors` and `host_matching` was added. + """ + + #: .. versionadded:: 0.6 + #: a dict of default converters to be used. + default_converters = ImmutableDict(DEFAULT_CONVERTERS) + + def __init__(self, rules=None, default_subdomain='', charset='utf-8', + strict_slashes=True, redirect_defaults=True, + converters=None, sort_parameters=False, sort_key=None, + encoding_errors='replace', host_matching=False): + self._rules = [] + self._rules_by_endpoint = {} + self._remap = True + self._remap_lock = Lock() + + self.default_subdomain = default_subdomain + self.charset = charset + self.encoding_errors = encoding_errors + self.strict_slashes = strict_slashes + self.redirect_defaults = redirect_defaults + self.host_matching = host_matching + + self.converters = self.default_converters.copy() + if converters: + self.converters.update(converters) + + self.sort_parameters = sort_parameters + self.sort_key = sort_key + + for rulefactory in rules or (): + self.add(rulefactory) + + def is_endpoint_expecting(self, endpoint, *arguments): + """Iterate over all rules and check if the endpoint expects + the arguments provided. This is for example useful if you have + some URLs that expect a language code and others that do not and + you want to wrap the builder a bit so that the current language + code is automatically added if not provided but endpoints expect + it. 
+ + :param endpoint: the endpoint to check. + :param arguments: this function accepts one or more arguments + as positional arguments. Each one of them is + checked. + """ + self.update() + arguments = set(arguments) + for rule in self._rules_by_endpoint[endpoint]: + if arguments.issubset(rule.arguments): + return True + return False + + def iter_rules(self, endpoint=None): + """Iterate over all rules or the rules of an endpoint. + + :param endpoint: if provided only the rules for that endpoint + are returned. + :return: an iterator + """ + self.update() + if endpoint is not None: + return iter(self._rules_by_endpoint[endpoint]) + return iter(self._rules) + + def add(self, rulefactory): + """Add a new rule or factory to the map and bind it. Requires that the + rule is not bound to another map. + + :param rulefactory: a :class:`Rule` or :class:`RuleFactory` + """ + for rule in rulefactory.get_rules(self): + rule.bind(self) + self._rules.append(rule) + self._rules_by_endpoint.setdefault(rule.endpoint, []).append(rule) + self._remap = True + + def bind(self, server_name, script_name=None, subdomain=None, + url_scheme='http', default_method='GET', path_info=None, + query_args=None): + """Return a new :class:`MapAdapter` with the details specified to the + call. Note that `script_name` will default to ``'/'`` if not further + specified or `None`. The `server_name` at least is a requirement + because the HTTP RFC requires absolute URLs for redirects and so all + redirect exceptions raised by Werkzeug will contain the full canonical + URL. + + If no path_info is passed to :meth:`match` it will use the default path + info passed to bind. While this doesn't really make sense for + manual bind calls, it's useful if you bind a map to a WSGI + environment which already contains the path info. + + `subdomain` will default to the `default_subdomain` for this map if + no defined. If there is no `default_subdomain` you cannot use the + subdomain feature. + + .. 
versionadded:: 0.7 + `query_args` added + + .. versionadded:: 0.8 + `query_args` can now also be a string. + """ + server_name = server_name.lower() + if self.host_matching: + if subdomain is not None: + raise RuntimeError('host matching enabled and a ' + 'subdomain was provided') + elif subdomain is None: + subdomain = self.default_subdomain + if script_name is None: + script_name = '/' + try: + server_name = _encode_idna(server_name) + except UnicodeError: + raise BadHost() + return MapAdapter(self, server_name, script_name, subdomain, + url_scheme, path_info, default_method, query_args) + + def bind_to_environ(self, environ, server_name=None, subdomain=None): + """Like :meth:`bind` but you can pass it an WSGI environment and it + will fetch the information from that dictionary. Note that because of + limitations in the protocol there is no way to get the current + subdomain and real `server_name` from the environment. If you don't + provide it, Werkzeug will use `SERVER_NAME` and `SERVER_PORT` (or + `HTTP_HOST` if provided) as used `server_name` with disabled subdomain + feature. + + If `subdomain` is `None` but an environment and a server name is + provided it will calculate the current subdomain automatically. + Example: `server_name` is ``'example.com'`` and the `SERVER_NAME` + in the wsgi `environ` is ``'staging.dev.example.com'`` the calculated + subdomain will be ``'staging.dev'``. + + If the object passed as environ has an environ attribute, the value of + this attribute is used instead. This allows you to pass request + objects. Additionally `PATH_INFO` added as a default of the + :class:`MapAdapter` so that you don't have to pass the path info to + the match method. + + .. versionchanged:: 0.5 + previously this method accepted a bogus `calculate_subdomain` + parameter that did not have any effect. It was removed because + of that. + + .. versionchanged:: 0.8 + This will no longer raise a ValueError when an unexpected server + name was passed. 
+ + :param environ: a WSGI environment. + :param server_name: an optional server name hint (see above). + :param subdomain: optionally the current subdomain (see above). + """ + environ = _get_environ(environ) + + if 'HTTP_HOST' in environ: + wsgi_server_name = environ['HTTP_HOST'] + + if environ['wsgi.url_scheme'] == 'http' \ + and wsgi_server_name.endswith(':80'): + wsgi_server_name = wsgi_server_name[:-3] + elif environ['wsgi.url_scheme'] == 'https' \ + and wsgi_server_name.endswith(':443'): + wsgi_server_name = wsgi_server_name[:-4] + else: + wsgi_server_name = environ['SERVER_NAME'] + + if (environ['wsgi.url_scheme'], environ['SERVER_PORT']) not \ + in (('https', '443'), ('http', '80')): + wsgi_server_name += ':' + environ['SERVER_PORT'] + + wsgi_server_name = wsgi_server_name.lower() + + if server_name is None: + server_name = wsgi_server_name + else: + server_name = server_name.lower() + + if subdomain is None and not self.host_matching: + cur_server_name = wsgi_server_name.split('.') + real_server_name = server_name.split('.') + offset = -len(real_server_name) + if cur_server_name[offset:] != real_server_name: + # This can happen even with valid configs if the server was + # accesssed directly by IP address under some situations. + # Instead of raising an exception like in Werkzeug 0.7 or + # earlier we go by an invalid subdomain which will result + # in a 404 error on matching. 
+ subdomain = '' + else: + subdomain = '.'.join(filter(None, cur_server_name[:offset])) + + def _get_wsgi_string(name): + val = environ.get(name) + if val is not None: + return wsgi_decoding_dance(val, self.charset) + + script_name = _get_wsgi_string('SCRIPT_NAME') + path_info = _get_wsgi_string('PATH_INFO') + query_args = _get_wsgi_string('QUERY_STRING') + return Map.bind(self, server_name, script_name, + subdomain, environ['wsgi.url_scheme'], + environ['REQUEST_METHOD'], path_info, + query_args=query_args) + + def update(self): + """Called before matching and building to keep the compiled rules + in the correct order after things changed. + """ + if not self._remap: + return + + with self._remap_lock: + if not self._remap: + return + + self._rules.sort(key=lambda x: x.match_compare_key()) + for rules in itervalues(self._rules_by_endpoint): + rules.sort(key=lambda x: x.build_compare_key()) + self._remap = False + + def __repr__(self): + rules = self.iter_rules() + return '%s(%s)' % (self.__class__.__name__, pformat(list(rules))) + + +class MapAdapter(object): + + """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does + the URL matching and building based on runtime information. + """ + + def __init__(self, map, server_name, script_name, subdomain, + url_scheme, path_info, default_method, query_args=None): + self.map = map + self.server_name = to_unicode(server_name) + script_name = to_unicode(script_name) + if not script_name.endswith(u'/'): + script_name += u'/' + self.script_name = script_name + self.subdomain = to_unicode(subdomain) + self.url_scheme = to_unicode(url_scheme) + self.path_info = to_unicode(path_info) + self.default_method = to_unicode(default_method) + self.query_args = query_args + + def dispatch(self, view_func, path_info=None, method=None, + catch_http_exceptions=False): + """Does the complete dispatching process. `view_func` is called with + the endpoint and a dict with the values for the view. 
It should + look up the view function, call it, and return a response object + or WSGI application. http exceptions are not caught by default + so that applications can display nicer error messages by just + catching them by hand. If you want to stick with the default + error messages you can pass it ``catch_http_exceptions=True`` and + it will catch the http exceptions. + + Here a small example for the dispatch usage:: + + from werkzeug.wrappers import Request, Response + from werkzeug.wsgi import responder + from werkzeug.routing import Map, Rule + + def on_index(request): + return Response('Hello from the index') + + url_map = Map([Rule('/', endpoint='index')]) + views = {'index': on_index} + + @responder + def application(environ, start_response): + request = Request(environ) + urls = url_map.bind_to_environ(environ) + return urls.dispatch(lambda e, v: views[e](request, **v), + catch_http_exceptions=True) + + Keep in mind that this method might return exception objects, too, so + use :class:`Response.force_type` to get a response object. + + :param view_func: a function that is called with the endpoint as + first argument and the value dict as second. Has + to dispatch to the actual view function with this + information. (see above) + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + :param catch_http_exceptions: set to `True` to catch any of the + werkzeug :class:`HTTPException`\s. + """ + try: + try: + endpoint, args = self.match(path_info, method) + except RequestRedirect as e: + return e + return view_func(endpoint, args) + except HTTPException as e: + if catch_http_exceptions: + return e + raise + + def match(self, path_info=None, method=None, return_rule=False, + query_args=None): + """The usage is simple: you just pass the match method the current + path info as well as the method (which defaults to `GET`). 
The + following things can then happen: + + - you receive a `NotFound` exception that indicates that no URL is + matching. A `NotFound` exception is also a WSGI application you + can call to get a default page not found page (happens to be the + same object as `werkzeug.exceptions.NotFound`) + + - you receive a `MethodNotAllowed` exception that indicates that there + is a match for this URL but not for the current request method. + This is useful for RESTful applications. + + - you receive a `RequestRedirect` exception with a `new_url` + attribute. This exception is used to notify you about a request + Werkzeug requests from your WSGI application. This is for example the + case if you request ``/foo`` although the correct URL is ``/foo/`` + You can use the `RequestRedirect` instance as response-like object + similar to all other subclasses of `HTTPException`. + + - you get a tuple in the form ``(endpoint, arguments)`` if there is + a match (unless `return_rule` is True, in which case you get a tuple + in the form ``(rule, arguments)``) + + If the path info is not passed to the match method the default path + info of the map is used (defaults to the root URL if not defined + explicitly). + + All of the exceptions raised are subclasses of `HTTPException` so they + can be used as WSGI responses. The will all render generic error or + redirect pages. + + Here is a small example for matching: + + >>> m = Map([ + ... Rule('/', endpoint='index'), + ... Rule('/downloads/', endpoint='downloads/index'), + ... Rule('/downloads/', endpoint='downloads/show') + ... ]) + >>> urls = m.bind("example.com", "/") + >>> urls.match("/", "GET") + ('index', {}) + >>> urls.match("/downloads/42") + ('downloads/show', {'id': 42}) + + And here is what happens on redirect and missing URLs: + + >>> urls.match("/downloads") + Traceback (most recent call last): + ... + RequestRedirect: http://example.com/downloads/ + >>> urls.match("/missing") + Traceback (most recent call last): + ... 
+ NotFound: 404 Not Found + + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + :param return_rule: return the rule that matched instead of just the + endpoint (defaults to `False`). + :param query_args: optional query arguments that are used for + automatic redirects as string or dictionary. It's + currently not possible to use the query arguments + for URL matching. + + .. versionadded:: 0.6 + `return_rule` was added. + + .. versionadded:: 0.7 + `query_args` was added. + + .. versionchanged:: 0.8 + `query_args` can now also be a string. + """ + self.map.update() + if path_info is None: + path_info = self.path_info + else: + path_info = to_unicode(path_info, self.map.charset) + if query_args is None: + query_args = self.query_args + method = (method or self.default_method).upper() + + path = u'%s|%s' % ( + self.map.host_matching and self.server_name or self.subdomain, + path_info and '/%s' % path_info.lstrip('/') + ) + + have_match_for = set() + for rule in self.map._rules: + try: + rv = rule.match(path) + except RequestSlash: + raise RequestRedirect(self.make_redirect_url( + url_quote(path_info, self.map.charset, + safe='/:|+') + '/', query_args)) + except RequestAliasRedirect as e: + raise RequestRedirect(self.make_alias_redirect_url( + path, rule.endpoint, e.matched_values, method, query_args)) + if rv is None: + continue + if rule.methods is not None and method not in rule.methods: + have_match_for.update(rule.methods) + continue + + if self.map.redirect_defaults: + redirect_url = self.get_default_redirect(rule, method, rv, + query_args) + if redirect_url is not None: + raise RequestRedirect(redirect_url) + + if rule.redirect_to is not None: + if isinstance(rule.redirect_to, string_types): + def _handle_match(match): + value = rv[match.group(1)] + return rule._converters[match.group(1)].to_url(value) + 
redirect_url = _simple_rule_re.sub(_handle_match, + rule.redirect_to) + else: + redirect_url = rule.redirect_to(self, **rv) + raise RequestRedirect(str(url_join('%s://%s%s%s' % ( + self.url_scheme or 'http', + self.subdomain and self.subdomain + '.' or '', + self.server_name, + self.script_name + ), redirect_url))) + + if return_rule: + return rule, rv + else: + return rule.endpoint, rv + + if have_match_for: + raise MethodNotAllowed(valid_methods=list(have_match_for)) + raise NotFound() + + def test(self, path_info=None, method=None): + """Test if a rule would match. Works like `match` but returns `True` + if the URL matches, or `False` if it does not exist. + + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + """ + try: + self.match(path_info, method) + except RequestRedirect: + pass + except HTTPException: + return False + return True + + def allowed_methods(self, path_info=None): + """Returns the valid methods that match for a given path. + + .. versionadded:: 0.7 + """ + try: + self.match(path_info, method='--') + except MethodNotAllowed as e: + return e.valid_methods + except HTTPException as e: + pass + return [] + + def get_host(self, domain_part): + """Figures out the full host name for the given domain part. The + domain part is a subdomain in case host matching is disabled or + a full host name. + """ + if self.map.host_matching: + if domain_part is None: + return self.server_name + return to_unicode(domain_part, 'ascii') + subdomain = domain_part + if subdomain is None: + subdomain = self.subdomain + else: + subdomain = to_unicode(subdomain, 'ascii') + return (subdomain and subdomain + u'.' or u'') + self.server_name + + def get_default_redirect(self, rule, method, values, query_args): + """A helper that returns the URL to redirect to if it finds one. + This is used for default redirecting only. 
+ + :internal: + """ + assert self.map.redirect_defaults + for r in self.map._rules_by_endpoint[rule.endpoint]: + # every rule that comes after this one, including ourself + # has a lower priority for the defaults. We order the ones + # with the highest priority up for building. + if r is rule: + break + if r.provides_defaults_for(rule) and \ + r.suitable_for(values, method): + values.update(r.defaults) + domain_part, path = r.build(values) + return self.make_redirect_url( + path, query_args, domain_part=domain_part) + + def encode_query_args(self, query_args): + if not isinstance(query_args, string_types): + query_args = url_encode(query_args, self.map.charset) + return query_args + + def make_redirect_url(self, path_info, query_args=None, domain_part=None): + """Creates a redirect URL. + + :internal: + """ + suffix = '' + if query_args: + suffix = '?' + self.encode_query_args(query_args) + return str('%s://%s/%s%s' % ( + self.url_scheme or 'http', + self.get_host(domain_part), + posixpath.join(self.script_name[:-1].lstrip('/'), + path_info.lstrip('/')), + suffix + )) + + def make_alias_redirect_url(self, path, endpoint, values, method, query_args): + """Internally called to make an alias redirect URL.""" + url = self.build(endpoint, values, method, append_unknown=False, + force_external=True) + if query_args: + url += '?' + self.encode_query_args(query_args) + assert url != path, 'detected invalid alias setting. No canonical ' \ + 'URL found' + return url + + def _partial_build(self, endpoint, values, method, append_unknown): + """Helper for :meth:`build`. Returns subdomain and path for the + rule that accepts this endpoint, values and method. 
+ + :internal: + """ + # in case the method is none, try with the default method first + if method is None: + rv = self._partial_build(endpoint, values, self.default_method, + append_unknown) + if rv is not None: + return rv + + # default method did not match or a specific method is passed, + # check all and go with first result. + for rule in self.map._rules_by_endpoint.get(endpoint, ()): + if rule.suitable_for(values, method): + rv = rule.build(values, append_unknown) + if rv is not None: + return rv + + def build(self, endpoint, values=None, method=None, force_external=False, + append_unknown=True): + """Building URLs works pretty much the other way round. Instead of + `match` you call `build` and pass it the endpoint and a dict of + arguments for the placeholders. + + The `build` function also accepts an argument called `force_external` + which, if you set it to `True` will force external URLs. Per default + external URLs (include the server name) will only be used if the + target URL is on a different subdomain. + + >>> m = Map([ + ... Rule('/', endpoint='index'), + ... Rule('/downloads/', endpoint='downloads/index'), + ... Rule('/downloads/', endpoint='downloads/show') + ... ]) + >>> urls = m.bind("example.com", "/") + >>> urls.build("index", {}) + '/' + >>> urls.build("downloads/show", {'id': 42}) + '/downloads/42' + >>> urls.build("downloads/show", {'id': 42}, force_external=True) + 'http://example.com/downloads/42' + + Because URLs cannot contain non ASCII data you will always get + bytestrings back. Non ASCII characters are urlencoded with the + charset defined on the map instance. 
+ + Additional values are converted to unicode and appended to the URL as + URL querystring parameters: + + >>> urls.build("index", {'q': 'My Searchstring'}) + '/?q=My+Searchstring' + + When processing those additional values, lists are furthermore + interpreted as multiple values (as per + :py:class:`werkzeug.datastructures.MultiDict`): + + >>> urls.build("index", {'q': ['a', 'b', 'c']}) + '/?q=a&q=b&q=c' + + If a rule does not exist when building a `BuildError` exception is + raised. + + The build method accepts an argument called `method` which allows you + to specify the method you want to have an URL built for if you have + different methods for the same endpoint specified. + + .. versionadded:: 0.6 + the `append_unknown` parameter was added. + + :param endpoint: the endpoint of the URL to build. + :param values: the values for the URL to build. Unhandled values are + appended to the URL as query parameters. + :param method: the HTTP method for the rule if there are different + URLs for different methods on the same endpoint. + :param force_external: enforce full canonical external URLs. If the URL + scheme is not provided, this will generate + a protocol-relative URL. + :param append_unknown: unknown parameters are appended to the generated + URL as query string argument. Disable this + if you want the builder to ignore those. + """ + self.map.update() + if values: + if isinstance(values, MultiDict): + valueiter = iteritems(values, multi=True) + else: + valueiter = iteritems(values) + values = dict((k, v) for k, v in valueiter if v is not None) + else: + values = {} + + rv = self._partial_build(endpoint, values, method, append_unknown) + if rv is None: + raise BuildError(endpoint, values, method, self) + domain_part, path = rv + + host = self.get_host(domain_part) + + # shortcut this. 
+ if not force_external and ( + (self.map.host_matching and host == self.server_name) or + (not self.map.host_matching and domain_part == self.subdomain) + ): + return str(url_join(self.script_name, './' + path.lstrip('/'))) + return str('%s//%s%s/%s' % ( + self.url_scheme + ':' if self.url_scheme else '', + host, + self.script_name[:-1], + path.lstrip('/') + )) diff --git a/deps/werkzeug/script.py b/deps/werkzeug/script.py new file mode 100644 index 00000000..cfff3c11 --- /dev/null +++ b/deps/werkzeug/script.py @@ -0,0 +1,318 @@ +# -*- coding: utf-8 -*- +r''' + werkzeug.script + ~~~~~~~~~~~~~~~ + + .. admonition:: Deprecated Functionality + + ``werkzeug.script`` is deprecated without replacement functionality. + Python's command line support improved greatly with :mod:`argparse` + and a bunch of alternative modules. + + Most of the time you have recurring tasks while writing an application + such as starting up an interactive python interpreter with some prefilled + imports, starting the development server, initializing the database or + something similar. + + For that purpose werkzeug provides the `werkzeug.script` module which + helps you writing such scripts. + + + Basic Usage + ----------- + + The following snippet is roughly the same in every werkzeug script:: + + #!/usr/bin/env python + # -*- coding: utf-8 -*- + from werkzeug import script + + # actions go here + + if __name__ == '__main__': + script.run() + + Starting this script now does nothing because no actions are defined. + An action is a function in the same module starting with ``"action_"`` + which takes a number of arguments where every argument has a default. The + type of the default value specifies the type of the argument. + + Arguments can then be passed by position or using ``--name=value`` from + the shell. 
def run(namespace=None, action_prefix='action_', args=None):
    """Run the script. Participating actions are looked up in the caller's
    namespace if no namespace is given, otherwise in the dict provided.
    Only items that start with action_prefix are processed as actions. If
    you want to use all items in the namespace provided as actions set
    action_prefix to an empty string.

    :param namespace: An optional dict where the functions are looked up in.
                      By default the local namespace of the caller is used.
    :param action_prefix: The prefix for the functions. Everything else
                          is ignored.
    :param args: the arguments for the function. If not specified
                 :data:`sys.argv` without the first argument is used.
    """
    if namespace is None:
        # Look at the caller's locals so that `script.run()` picks up the
        # action_* functions defined in the calling module.
        namespace = sys._getframe(1).f_locals
    actions = find_actions(namespace, action_prefix)

    if args is None:
        args = sys.argv[1:]
    if not args or args[0] in ('-h', '--help'):
        return print_usage(actions)
    elif args[0] not in actions:
        fail('Unknown action \'%s\'' % args[0])

    arguments = {}
    types = {}
    key_to_arg = {}
    long_options = []
    formatstring = ''
    func, doc, arg_def = actions[args.pop(0)]
    # Build the getopt format string / long-option list, and remember for
    # every way an argument can be addressed (-s, --long, position) which
    # canonical argument name it maps to.
    for idx, (arg, shortcut, default, option_type) in enumerate(arg_def):
        real_arg = arg.replace('-', '_')
        if shortcut:
            formatstring += shortcut
            if not isinstance(default, bool):
                # non-boolean shortcuts take a value (":" in getopt syntax)
                formatstring += ':'
            key_to_arg['-' + shortcut] = real_arg
        long_options.append(isinstance(default, bool) and arg or arg + '=')
        key_to_arg['--' + arg] = real_arg
        key_to_arg[idx] = real_arg
        types[real_arg] = option_type
        arguments[real_arg] = default

    try:
        optlist, posargs = getopt.gnu_getopt(args, formatstring, long_options)
    except getopt.GetoptError as e:
        fail(str(e))

    specified_arguments = set()
    for key, value in enumerate(posargs):
        try:
            arg = key_to_arg[key]
        except KeyError:
            # BUGFIX: key_to_arg is a dict, so an unknown positional index
            # raises KeyError.  The previous `except IndexError` never
            # matched, and surplus positional arguments crashed with a raw
            # traceback instead of this error message.
            fail('Too many parameters')
        specified_arguments.add(arg)
        try:
            arguments[arg] = converters[types[arg]](value)
        except ValueError:
            fail('Invalid value for argument %s (%s): %s' % (key, arg, value))

    for key, value in optlist:
        arg = key_to_arg[key]
        if arg in specified_arguments:
            fail('Argument \'%s\' is specified twice' % arg)
        if types[arg] == 'boolean':
            # getopt passes '' for value-less flags; translate mere presence
            # into yes/no, honouring the inverted `no_` convention used for
            # arguments whose default is True.
            if arg.startswith('no_'):
                value = 'no'
            else:
                value = 'yes'
        try:
            arguments[arg] = converters[types[arg]](value)
        except ValueError:
            fail('Invalid value for \'%s\': %s' % (key, value))

    # Strip the `no_` prefix again before calling the action so the
    # function receives its real parameter names.
    newargs = {}
    for k, v in iteritems(arguments):
        newargs[k.startswith('no_') and k[3:] or k] = v
    arguments = newargs
    return func(**arguments)
error.""" + print('Error: %s' % message, file=sys.stderr) + sys.exit(code) + + +def find_actions(namespace, action_prefix): + """Find all the actions in the namespace.""" + actions = {} + for key, value in iteritems(namespace): + if key.startswith(action_prefix): + actions[key[len(action_prefix):]] = analyse_action(value) + return actions + + +def print_usage(actions): + """Print the usage information. (Help screen)""" + actions = sorted(iteritems(actions)) + print('usage: %s []' % basename(sys.argv[0])) + print(' %s --help' % basename(sys.argv[0])) + print() + print('actions:') + for name, (func, doc, arguments) in actions: + print(' %s:' % name) + for line in doc.splitlines(): + print(' %s' % line) + if arguments: + print() + for arg, shortcut, default, argtype in arguments: + if isinstance(default, bool): + print(' %s' % ( + (shortcut and '-%s, ' % shortcut or '') + '--' + arg + )) + else: + print(' %-30s%-10s%s' % ( + (shortcut and '-%s, ' % shortcut or '') + '--' + arg, + argtype, default + )) + print() + + +def analyse_action(func): + """Analyse a function.""" + description = inspect.getdoc(func) or 'undocumented action' + arguments = [] + args, varargs, kwargs, defaults = inspect.getargspec(func) + if varargs or kwargs: + raise TypeError('variable length arguments for action not allowed.') + if len(args) != len(defaults or ()): + raise TypeError('not all arguments have proper definitions') + + for idx, (arg, definition) in enumerate(zip(args, defaults or ())): + if arg.startswith('_'): + raise TypeError('arguments may not start with an underscore') + if not isinstance(definition, tuple): + shortcut = None + default = definition + else: + shortcut, default = definition + argument_type = argument_types[type(default)] + if isinstance(default, bool) and default is True: + arg = 'no-' + arg + arguments.append((arg.replace('_', '-'), shortcut, + default, argument_type)) + return func, description, arguments + + +def make_shell(init_func=None, banner=None, 
def make_runserver(app_factory, hostname='localhost', port=5000,
                   use_reloader=False, use_debugger=False, use_evalex=True,
                   threaded=False, processes=1, static_files=None,
                   extra_files=None, ssl_context=None):
    """Returns an action callback that spawns a new development server.

    .. versionadded:: 0.5
       `static_files` and `extra_files` was added.

    .. versionadded:: 0.6.1
       `ssl_context` was added.

    :param app_factory: a function that returns a new WSGI application.
    :param hostname: the default hostname the server should listen on.
    :param port: the default port of the server.
    :param use_reloader: the default setting for the reloader.
    :param use_debugger: the default setting for the debugger.
    :param use_evalex: the default setting for the evalex flag of the debugger.
    :param threaded: the default threading setting.
    :param processes: the default number of processes to start.
    :param static_files: optional dict of static files.
    :param extra_files: optional list of extra files to track for reloading.
    :param ssl_context: optional SSL context for running server in HTTPS mode.
    """
    # NOTE: the inner function's parameter names and defaults form the
    # command line interface that `run`/`print_usage` introspect, so they
    # must stay exactly as they are.
    def action(hostname=('h', hostname), port=('p', port),
               reloader=use_reloader, debugger=use_debugger,
               evalex=use_evalex, threaded=threaded, processes=processes):
        """Start a new development server."""
        from werkzeug.serving import run_simple
        run_simple(hostname, port, app_factory(),
                   use_reloader=reloader,
                   use_debugger=debugger,
                   use_evalex=evalex,
                   extra_files=extra_files,
                   reloader_interval=1,
                   threaded=threaded,
                   processes=processes,
                   static_files=static_files,
                   ssl_context=ssl_context)
    return action
+""" +import os +import hmac +import hashlib +import posixpath +import codecs +from struct import Struct +from random import SystemRandom +from operator import xor +from itertools import starmap + +from werkzeug._compat import range_type, PY2, text_type, izip, to_bytes, \ + string_types, to_native + + +SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' +DEFAULT_PBKDF2_ITERATIONS = 1000 + + +_pack_int = Struct('>I').pack +_builtin_safe_str_cmp = getattr(hmac, 'compare_digest', None) +_sys_rng = SystemRandom() +_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep] + if sep not in (None, '/')) + + +def _find_hashlib_algorithms(): + algos = getattr(hashlib, 'algorithms', None) + if algos is None: + algos = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') + rv = {} + for algo in algos: + func = getattr(hashlib, algo, None) + if func is not None: + rv[algo] = func + return rv +_hash_funcs = _find_hashlib_algorithms() + + +def pbkdf2_hex(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, + keylen=None, hashfunc=None): + """Like :func:`pbkdf2_bin`, but returns a hex-encoded string. + + .. versionadded:: 0.9 + + :param data: the data to derive. + :param salt: the salt for the derivation. + :param iterations: the number of iterations. + :param keylen: the length of the resulting key. If not provided, + the digest size will be used. + :param hashfunc: the hash function to use. This can either be the + string name of a known hash function, or a function + from the hashlib module. Defaults to sha1. + """ + rv = pbkdf2_bin(data, salt, iterations, keylen, hashfunc) + return to_native(codecs.encode(rv, 'hex_codec')) + + +_has_native_pbkdf2 = hasattr(hashlib, 'pbkdf2_hmac') + + +def pbkdf2_bin(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, + keylen=None, hashfunc=None): + """Returns a binary digest for the PBKDF2 hash algorithm of `data` + with the given `salt`. It iterates `iterations` times and produces a + key of `keylen` bytes. 
def safe_str_cmp(a, b):
    """This function compares strings in somewhat constant time. This
    requires that the length of at least one string is known in advance.

    Returns `True` if the two strings are equal, or `False` if they are not.

    .. versionadded:: 0.7
    """
    # Normalize both operands to bytes so the byte-wise XOR below is well
    # defined on Python 3 and comparisons never mix str and bytes.
    if isinstance(a, text_type):
        a = a.encode('utf-8')
    if isinstance(b, text_type):
        b = b.encode('utf-8')

    # Prefer the interpreter's C implementation (hmac.compare_digest),
    # captured at import time, when it is available.
    if _builtin_safe_str_cmp is not None:
        return _builtin_safe_str_cmp(a, b)

    # Pure-Python fallback: the length check leaks only the length,
    # never the content.
    if len(a) != len(b):
        return False

    # Accumulate XOR differences over ALL byte pairs instead of returning
    # early, so the running time does not depend on where (or whether)
    # the strings differ.
    rv = 0
    if PY2:
        # On Python 2 iterating bytes yields 1-char strings; convert with
        # ord() before XORing.
        for x, y in izip(a, b):
            rv |= ord(x) ^ ord(y)
    else:
        for x, y in izip(a, b):
            rv |= x ^ y

    return rv == 0
def safe_join(directory, filename):
    """Safely join `directory` and `filename`. If this cannot be done,
    this function returns ``None``.

    :param directory: the base directory.
    :param filename: the untrusted filename relative to that directory.
    :return: the joined path, or ``None`` when `filename` would escape
             `directory`.
    """
    filename = posixpath.normpath(filename)
    # Reject any native path separator other than '/': on Windows a
    # backslash (or os.altsep) would survive posixpath.normpath and could
    # smuggle in extra path components.
    alt_seps = [sep for sep in (os.path.sep, os.path.altsep)
                if sep not in (None, '/')]
    for sep in alt_seps:
        if sep in filename:
            return None
    # BUGFIX: `filename == '..'` must be checked explicitly.  normpath
    # reduces a lone parent reference to exactly '..', which does not match
    # the '../' prefix test but still escapes `directory` (path traversal).
    if os.path.isabs(filename) or \
       filename == '..' or \
       filename.startswith('../'):
        return None
    return os.path.join(directory, filename)
+""" +from __future__ import with_statement + +import os +import socket +import sys +import signal + +try: + import ssl +except ImportError: + class _SslDummy(object): + def __getattr__(self, name): + raise RuntimeError('SSL support unavailable') + ssl = _SslDummy() + + +def _get_openssl_crypto_module(): + try: + from OpenSSL import crypto + except ImportError: + raise TypeError('Using ad-hoc certificates requires the pyOpenSSL ' + 'library.') + else: + return crypto + + +try: + from SocketServer import ThreadingMixIn, ForkingMixIn + from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler +except ImportError: + from socketserver import ThreadingMixIn, ForkingMixIn + from http.server import HTTPServer, BaseHTTPRequestHandler + +# important: do not use relative imports here or python -m will break +import werkzeug +from werkzeug._internal import _log +from werkzeug._compat import PY2, reraise, wsgi_encoding_dance +from werkzeug.urls import url_parse, url_unquote +from werkzeug.exceptions import InternalServerError + + +LISTEN_QUEUE = 128 +can_open_by_fd = hasattr(socket, 'fromfd') + + +class WSGIRequestHandler(BaseHTTPRequestHandler, object): + + """A request handler that implements WSGI dispatching.""" + + @property + def server_version(self): + return 'Werkzeug/' + werkzeug.__version__ + + def make_environ(self): + request_url = url_parse(self.path) + + def shutdown_server(): + self.server.shutdown_signal = True + + url_scheme = self.server.ssl_context is None and 'http' or 'https' + path_info = url_unquote(request_url.path) + + environ = { + 'wsgi.version': (1, 0), + 'wsgi.url_scheme': url_scheme, + 'wsgi.input': self.rfile, + 'wsgi.errors': sys.stderr, + 'wsgi.multithread': self.server.multithread, + 'wsgi.multiprocess': self.server.multiprocess, + 'wsgi.run_once': False, + 'werkzeug.server.shutdown': shutdown_server, + 'SERVER_SOFTWARE': self.server_version, + 'REQUEST_METHOD': self.command, + 'SCRIPT_NAME': '', + 'PATH_INFO': 
wsgi_encoding_dance(path_info), + 'QUERY_STRING': wsgi_encoding_dance(request_url.query), + 'CONTENT_TYPE': self.headers.get('Content-Type', ''), + 'CONTENT_LENGTH': self.headers.get('Content-Length', ''), + 'REMOTE_ADDR': self.address_string(), + 'REMOTE_PORT': self.port_integer(), + 'SERVER_NAME': self.server.server_address[0], + 'SERVER_PORT': str(self.server.server_address[1]), + 'SERVER_PROTOCOL': self.request_version + } + + for key, value in self.headers.items(): + key = 'HTTP_' + key.upper().replace('-', '_') + if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'): + environ[key] = value + + if request_url.scheme and request_url.netloc: + environ['HTTP_HOST'] = request_url.netloc + + return environ + + def run_wsgi(self): + if self.headers.get('Expect', '').lower().strip() == '100-continue': + self.wfile.write(b'HTTP/1.1 100 Continue\r\n\r\n') + + self.environ = environ = self.make_environ() + headers_set = [] + headers_sent = [] + + def write(data): + assert headers_set, 'write() before start_response' + if not headers_sent: + status, response_headers = headers_sent[:] = headers_set + try: + code, msg = status.split(None, 1) + except ValueError: + code, msg = status, "" + self.send_response(int(code), msg) + header_keys = set() + for key, value in response_headers: + self.send_header(key, value) + key = key.lower() + header_keys.add(key) + if 'content-length' not in header_keys: + self.close_connection = True + self.send_header('Connection', 'close') + if 'server' not in header_keys: + self.send_header('Server', self.version_string()) + if 'date' not in header_keys: + self.send_header('Date', self.date_time_string()) + self.end_headers() + + assert isinstance(data, bytes), 'applications must write bytes' + self.wfile.write(data) + self.wfile.flush() + + def start_response(status, response_headers, exc_info=None): + if exc_info: + try: + if headers_sent: + reraise(*exc_info) + finally: + exc_info = None + elif headers_set: + raise 
    def initiate_shutdown(self):
        """A horrible, horrible way to kill the server for Python 2.6 and
        later. It's the best we can do.
        """
        # Windows does not provide SIGKILL, go with SIGTERM then.
        sig = getattr(signal, 'SIGKILL', signal.SIGTERM)
        # reloader active: under the reloader the serving process is a
        # child that gets restarted anyway, so killing this process
        # outright is the only reliable way to stop serving.
        if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
            os.kill(os.getpid(), sig)
        # python 2.7: flip BaseServer's private, name-mangled stop flag so
        # serve_forever() exits after the current request.
        self.server._BaseServer__shutdown_request = True
        # python 2.6: same idea under the older attribute name.
        self.server._BaseServer__serving = False
+ """ + + def handle_one_request(self): + """Handle a single HTTP request.""" + self.raw_requestline = self.rfile.readline() + if not self.raw_requestline: + self.close_connection = 1 + elif self.parse_request(): + return self.run_wsgi() + + def send_response(self, code, message=None): + """Send the response header and log the response code.""" + self.log_request(code) + if message is None: + message = code in self.responses and self.responses[code][0] or '' + if self.request_version != 'HTTP/0.9': + hdr = "%s %d %s\r\n" % (self.protocol_version, code, message) + self.wfile.write(hdr.encode('ascii')) + + def version_string(self): + return BaseHTTPRequestHandler.version_string(self).strip() + + def address_string(self): + return self.client_address[0] + + def port_integer(self): + return self.client_address[1] + + def log_request(self, code='-', size='-'): + self.log('info', '"%s" %s %s', self.requestline, code, size) + + def log_error(self, *args): + self.log('error', *args) + + def log_message(self, format, *args): + self.log('info', format, *args) + + def log(self, type, message, *args): + _log(type, '%s - - [%s] %s\n' % (self.address_string(), + self.log_date_time_string(), + message % args)) + + +#: backwards compatible name if someone is subclassing it +BaseRequestHandler = WSGIRequestHandler + + +def generate_adhoc_ssl_pair(cn=None): + from random import random + crypto = _get_openssl_crypto_module() + + # pretty damn sure that this is not actually accepted by anyone + if cn is None: + cn = '*' + + cert = crypto.X509() + cert.set_serial_number(int(random() * sys.maxsize)) + cert.gmtime_adj_notBefore(0) + cert.gmtime_adj_notAfter(60 * 60 * 24 * 365) + + subject = cert.get_subject() + subject.CN = cn + subject.O = 'Dummy Certificate' + + issuer = cert.get_issuer() + issuer.CN = 'Untrusted Authority' + issuer.O = 'Self-Signed' + + pkey = crypto.PKey() + pkey.generate_key(crypto.TYPE_RSA, 1024) + cert.set_pubkey(pkey) + cert.sign(pkey, 'md5') + + return cert, 
def make_ssl_devcert(base_path, host=None, cn=None):
    """Creates an SSL key for development. This should be used instead of
    the ``'adhoc'`` key which generates a new cert on each server start.
    It accepts a path for where it should store the key and cert and
    either a host or CN. If a host is given it will use the CN
    ``*.host/CN=host``.

    For more information see :func:`run_simple`.

    .. versionadded:: 0.9

    :param base_path: the path to the certificate and key. The extension
                      ``.crt`` is added for the certificate, ``.key`` is
                      added for the key.
    :param host: the name of the host. This can be used as an alternative
                 for the `cn`.
    :param cn: the `CN` to use.
    """
    # Imported lazily: pyOpenSSL is only required when a certificate is
    # actually generated.
    from OpenSSL import crypto
    # A plain host name is expanded to a wildcard CN for that host.
    if host is not None:
        cn = '*.%s/CN=%s' % (host, host)
    cert, pkey = generate_adhoc_ssl_pair(cn=cn)

    cert_file = base_path + '.crt'
    pkey_file = base_path + '.key'

    # Serialize both halves as PEM files next to each other on disk.
    with open(cert_file, 'wb') as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    with open(pkey_file, 'wb') as f:
        f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))

    return cert_file, pkey_file
def select_ip_version(host, port):
    """Returns AF_INET4 or AF_INET6 depending on where to connect to."""
    # A socket.getaddrinfo() based lookup used to live here, but it was
    # disabled because of problems with several ipv6 implementations and
    # operating systems; we fall back to a simple literal check on the
    # host string instead.
    has_ipv6 = hasattr(socket, 'AF_INET6')
    if has_ipv6 and ':' in host:
        return socket.AF_INET6
    return socket.AF_INET
class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer):

    """A WSGI server that does forking."""
    multiprocess = True

    def __init__(self, host, port, app, processes=40, handler=None,
                 passthrough_errors=False, ssl_context=None, fd=None):
        # All socket/SSL setup is delegated to BaseWSGIServer; this class
        # only layers process-based concurrency on top via ForkingMixIn.
        BaseWSGIServer.__init__(self, host, port, app, handler,
                                passthrough_errors, ssl_context, fd)
        # max_children is the attribute ForkingMixIn reads to cap the
        # number of concurrently forked request-handler processes.
        self.max_children = processes
+ """ + if threaded and processes > 1: + raise ValueError("cannot have a multithreaded and " + "multi process server.") + elif threaded: + return ThreadedWSGIServer(host, port, app, request_handler, + passthrough_errors, ssl_context, fd=fd) + elif processes > 1: + return ForkingWSGIServer(host, port, app, processes, request_handler, + passthrough_errors, ssl_context, fd=fd) + else: + return BaseWSGIServer(host, port, app, request_handler, + passthrough_errors, ssl_context, fd=fd) + + +def is_running_from_reloader(): + """Checks if the application is running from within the Werkzeug + reloader subprocess. + + .. versionadded:: 0.10 + """ + return os.environ.get('WERKZEUG_RUN_MAIN') == 'true' + + +def run_simple(hostname, port, application, use_reloader=False, + use_debugger=False, use_evalex=True, + extra_files=None, reloader_interval=1, + reloader_type='auto', threaded=False, + processes=1, request_handler=None, static_files=None, + passthrough_errors=False, ssl_context=None): + """Start a WSGI application. Optional features include a reloader, + multithreading and fork support. + + This function has a command-line interface too:: + + python -m werkzeug.serving --help + + .. versionadded:: 0.5 + `static_files` was added to simplify serving of static files as well + as `passthrough_errors`. + + .. versionadded:: 0.6 + support for SSL was added. + + .. versionadded:: 0.8 + Added support for automatically loading a SSL context from certificate + file and private key. + + .. versionadded:: 0.9 + Added command-line interface. + + .. versionadded:: 0.10 + Improved the reloader and added support for changing the backend + through the `reloader_type` parameter. See :ref:`reloader` + for more information. + + :param hostname: The host for the application. eg: ``'localhost'`` + :param port: The port for the server. 
eg: ``8080`` + :param application: the WSGI application to execute + :param use_reloader: should the server automatically restart the python + process if modules were changed? + :param use_debugger: should the werkzeug debugging system be used? + :param use_evalex: should the exception evaluation feature be enabled? + :param extra_files: a list of files the reloader should watch + additionally to the modules. For example configuration + files. + :param reloader_interval: the interval for the reloader in seconds. + :param reloader_type: the type of reloader to use. The default is + auto detection. Valid values are ``'stat'`` and + ``'watchdog'``. See :ref:`reloader` for more + information. + :param threaded: should the process handle each request in a separate + thread? + :param processes: if greater than 1 then handle each request in a new process + up to this maximum number of concurrent processes. + :param request_handler: optional parameter that can be used to replace + the default one. You can use this to replace it + with a different + :class:`~BaseHTTPServer.BaseHTTPRequestHandler` + subclass. + :param static_files: a dict of paths for static files. This works exactly + like :class:`SharedDataMiddleware`, it's actually + just wrapping the application in that middleware before + serving. + :param passthrough_errors: set this to `True` to disable the error catching. + This means that the server will die on errors but + it can be useful to hook debuggers in (pdb etc.) + :param ssl_context: an SSL context for the connection. Either an + :class:`ssl.SSLContext`, a tuple in the form + ``(cert_file, pkey_file)``, the string ``'adhoc'`` if + the server should automatically create one, or ``None`` + to disable SSL (which is the default). 
+ """ + if use_debugger: + from werkzeug.debug import DebuggedApplication + application = DebuggedApplication(application, use_evalex) + if static_files: + from werkzeug.wsgi import SharedDataMiddleware + application = SharedDataMiddleware(application, static_files) + + def log_startup(sock): + display_hostname = hostname not in ('', '*') and hostname or 'localhost' + if ':' in display_hostname: + display_hostname = '[%s]' % display_hostname + quit_msg = '(Press CTRL+C to quit)' + port = sock.getsockname()[1] + _log('info', ' * Running on %s://%s:%d/ %s', + ssl_context is None and 'http' or 'https', + display_hostname, port, quit_msg) + + def inner(): + try: + fd = int(os.environ['WERKZEUG_SERVER_FD']) + except (LookupError, ValueError): + fd = None + srv = make_server(hostname, port, application, threaded, + processes, request_handler, + passthrough_errors, ssl_context, + fd=fd) + if fd is None: + log_startup(srv.socket) + srv.serve_forever() + + if use_reloader: + # If we're not running already in the subprocess that is the + # reloader we want to open up a socket early to make sure the + # port is actually available. + if os.environ.get('WERKZEUG_RUN_MAIN') != 'true': + if port == 0 and not can_open_by_fd: + raise ValueError('Cannot bind to a random port with enabled ' + 'reloader if the Python interpreter does ' + 'not support socket opening by fd.') + + # Create and destroy a socket so that any exceptions are + # raised before we spawn a separate Python interpreter and + # lose this ability. + address_family = select_ip_version(hostname, port) + s = socket.socket(address_family, socket.SOCK_STREAM) + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind((hostname, port)) + if hasattr(s, 'set_inheritable'): + s.set_inheritable(True) + + # If we can open the socket by file descriptor, then we can just + # reuse this one and our socket will survive the restarts. 
+ if can_open_by_fd: + os.environ['WERKZEUG_SERVER_FD'] = str(s.fileno()) + s.listen(LISTEN_QUEUE) + log_startup(s) + else: + s.close() + + from ._reloader import run_with_reloader + run_with_reloader(inner, extra_files, reloader_interval, + reloader_type) + else: + inner() + + +def run_with_reloader(*args, **kwargs): + # People keep using undocumented APIs. Do not use this function + # please, we do not guarantee that it continues working. + from ._reloader import run_with_reloader + return run_with_reloader(*args, **kwargs) + + +def main(): + '''A simple command-line interface for :py:func:`run_simple`.''' + + # in contrast to argparse, this works at least under Python < 2.7 + import optparse + from werkzeug.utils import import_string + + parser = optparse.OptionParser( + usage='Usage: %prog [options] app_module:app_object') + parser.add_option('-b', '--bind', dest='address', + help='The hostname:port the app should listen on.') + parser.add_option('-d', '--debug', dest='use_debugger', + action='store_true', default=False, + help='Use Werkzeug\'s debugger.') + parser.add_option('-r', '--reload', dest='use_reloader', + action='store_true', default=False, + help='Reload Python process if modules change.') + options, args = parser.parse_args() + + hostname, port = None, None + if options.address: + address = options.address.split(':') + hostname = address[0] + if len(address) > 1: + port = address[1] + + if len(args) != 1: + sys.stdout.write('No application supplied, or too much. 
See --help\n') + sys.exit(1) + app = import_string(args[0]) + + run_simple( + hostname=(hostname or '127.0.0.1'), port=int(port or 5000), + application=app, use_reloader=options.use_reloader, + use_debugger=options.use_debugger + ) + +if __name__ == '__main__': + main() diff --git a/deps/werkzeug/test.py b/deps/werkzeug/test.py new file mode 100644 index 00000000..84f1cd94 --- /dev/null +++ b/deps/werkzeug/test.py @@ -0,0 +1,896 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.test + ~~~~~~~~~~~~~ + + This module implements a client to WSGI applications for testing. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import sys +import mimetypes +from time import time +from random import random +from itertools import chain +from tempfile import TemporaryFile +from io import BytesIO + +try: + from urllib2 import Request as U2Request +except ImportError: + from urllib.request import Request as U2Request +try: + from http.cookiejar import CookieJar +except ImportError: # Py2 + from cookielib import CookieJar + +from werkzeug._compat import iterlists, iteritems, itervalues, to_bytes, \ + string_types, text_type, reraise, wsgi_encoding_dance, \ + make_literal_wrapper +from werkzeug._internal import _empty_stream, _get_environ +from werkzeug.wrappers import BaseRequest +from werkzeug.urls import url_encode, url_fix, iri_to_uri, url_unquote, \ + url_unparse, url_parse +from werkzeug.wsgi import get_host, get_current_url, ClosingIterator +from werkzeug.utils import dump_cookie +from werkzeug.datastructures import FileMultiDict, MultiDict, \ + CombinedMultiDict, Headers, FileStorage + + +def stream_encode_multipart(values, use_tempfile=True, threshold=1024 * 500, + boundary=None, charset='utf-8'): + """Encode a dict of values (either strings or file descriptors or + :class:`FileStorage` objects.) into a multipart encoded string stored + in a file descriptor. 
+ """ + if boundary is None: + boundary = '---------------WerkzeugFormPart_%s%s' % (time(), random()) + _closure = [BytesIO(), 0, False] + + if use_tempfile: + def write_binary(string): + stream, total_length, on_disk = _closure + if on_disk: + stream.write(string) + else: + length = len(string) + if length + _closure[1] <= threshold: + stream.write(string) + else: + new_stream = TemporaryFile('wb+') + new_stream.write(stream.getvalue()) + new_stream.write(string) + _closure[0] = new_stream + _closure[2] = True + _closure[1] = total_length + length + else: + write_binary = _closure[0].write + + def write(string): + write_binary(string.encode(charset)) + + if not isinstance(values, MultiDict): + values = MultiDict(values) + + for key, values in iterlists(values): + for value in values: + write('--%s\r\nContent-Disposition: form-data; name="%s"' % + (boundary, key)) + reader = getattr(value, 'read', None) + if reader is not None: + filename = getattr(value, 'filename', + getattr(value, 'name', None)) + content_type = getattr(value, 'content_type', None) + if content_type is None: + content_type = filename and \ + mimetypes.guess_type(filename)[0] or \ + 'application/octet-stream' + if filename is not None: + write('; filename="%s"\r\n' % filename) + else: + write('\r\n') + write('Content-Type: %s\r\n\r\n' % content_type) + while 1: + chunk = reader(16384) + if not chunk: + break + write_binary(chunk) + else: + if not isinstance(value, string_types): + value = str(value) + + value = to_bytes(value, charset) + write('\r\n\r\n') + write_binary(value) + write('\r\n') + write('--%s--\r\n' % boundary) + + length = int(_closure[0].tell()) + _closure[0].seek(0) + return _closure[0], length, boundary + + +def encode_multipart(values, boundary=None, charset='utf-8'): + """Like `stream_encode_multipart` but returns a tuple in the form + (``boundary``, ``data``) where data is a bytestring. 
+ """ + stream, length, boundary = stream_encode_multipart( + values, use_tempfile=False, boundary=boundary, charset=charset) + return boundary, stream.read() + + +def File(fd, filename=None, mimetype=None): + """Backwards compat.""" + from warnings import warn + warn(DeprecationWarning('werkzeug.test.File is deprecated, use the ' + 'EnvironBuilder or FileStorage instead')) + return FileStorage(fd, filename=filename, content_type=mimetype) + + +class _TestCookieHeaders(object): + + """A headers adapter for cookielib + """ + + def __init__(self, headers): + self.headers = headers + + def getheaders(self, name): + headers = [] + name = name.lower() + for k, v in self.headers: + if k.lower() == name: + headers.append(v) + return headers + + def get_all(self, name, default=None): + rv = [] + for k, v in self.headers: + if k.lower() == name.lower(): + rv.append(v) + return rv or default or [] + + +class _TestCookieResponse(object): + + """Something that looks like a httplib.HTTPResponse, but is actually just an + adapter for our test responses to make them available for cookielib. + """ + + def __init__(self, headers): + self.headers = _TestCookieHeaders(headers) + + def info(self): + return self.headers + + +class _TestCookieJar(CookieJar): + + """A cookielib.CookieJar modified to inject and read cookie headers from + and to wsgi environments, and wsgi application responses. + """ + + def inject_wsgi(self, environ): + """Inject the cookies as client headers into the server's wsgi + environment. + """ + cvals = [] + for cookie in self: + cvals.append('%s=%s' % (cookie.name, cookie.value)) + if cvals: + environ['HTTP_COOKIE'] = '; '.join(cvals) + + def extract_wsgi(self, environ, headers): + """Extract the server's set-cookie headers as cookies into the + cookie jar. + """ + self.extract_cookies( + _TestCookieResponse(headers), + U2Request(get_current_url(environ)), + ) + + +def _iter_data(data): + """Iterates over a dict or multidict yielding all keys and values. 
+ This is used to iterate over the data passed to the + :class:`EnvironBuilder`. + """ + if isinstance(data, MultiDict): + for key, values in iterlists(data): + for value in values: + yield key, value + else: + for key, values in iteritems(data): + if isinstance(values, list): + for value in values: + yield key, value + else: + yield key, values + + +class EnvironBuilder(object): + + """This class can be used to conveniently create a WSGI environment + for testing purposes. It can be used to quickly create WSGI environments + or request objects from arbitrary data. + + The signature of this class is also used in some other places as of + Werkzeug 0.5 (:func:`create_environ`, :meth:`BaseResponse.from_values`, + :meth:`Client.open`). Because of this most of the functionality is + available through the constructor alone. + + Files and regular form data can be manipulated independently of each + other with the :attr:`form` and :attr:`files` attributes, but are + passed with the same argument to the constructor: `data`. + + `data` can be any of these values: + + - a `str`: If it's a string it is converted into a :attr:`input_stream`, + the :attr:`content_length` is set and you have to provide a + :attr:`content_type`. + - a `dict`: If it's a dict the keys have to be strings and the values + any of the following objects: + + - a :class:`file`-like object. These are converted into + :class:`FileStorage` objects automatically. + - a tuple. The :meth:`~FileMultiDict.add_file` method is called + with the tuple items as positional arguments. + + .. versionadded:: 0.6 + `path` and `base_url` can now be unicode strings that are encoded using + the :func:`iri_to_uri` function. + + :param path: the path of the request. In the WSGI environment this will + end up as `PATH_INFO`. If the `query_string` is not defined + and there is a question mark in the `path` everything after + it is used as query string. 
+ :param base_url: the base URL is a URL that is used to extract the WSGI + URL scheme, host (server name + server port) and the + script root (`SCRIPT_NAME`). + :param query_string: an optional string or dict with URL parameters. + :param method: the HTTP method to use, defaults to `GET`. + :param input_stream: an optional input stream. Do not specify this and + `data`. As soon as an input stream is set you can't + modify :attr:`args` and :attr:`files` unless you + set the :attr:`input_stream` to `None` again. + :param content_type: The content type for the request. As of 0.5 you + don't have to provide this when specifying files + and form data via `data`. + :param content_length: The content length for the request. You don't + have to specify this when providing data via + `data`. + :param errors_stream: an optional error stream that is used for + `wsgi.errors`. Defaults to :data:`stderr`. + :param multithread: controls `wsgi.multithread`. Defaults to `False`. + :param multiprocess: controls `wsgi.multiprocess`. Defaults to `False`. + :param run_once: controls `wsgi.run_once`. Defaults to `False`. + :param headers: an optional list or :class:`Headers` object of headers. + :param data: a string or dict of form data. See explanation above. + :param environ_base: an optional dict of environment defaults. + :param environ_overrides: an optional dict of environment overrides. + :param charset: the charset used to encode unicode data. + """ + + #: the server protocol to use. defaults to HTTP/1.1 + server_protocol = 'HTTP/1.1' + + #: the wsgi version to use. 
defaults to (1, 0) + wsgi_version = (1, 0) + + #: the default request class for :meth:`get_request` + request_class = BaseRequest + + def __init__(self, path='/', base_url=None, query_string=None, + method='GET', input_stream=None, content_type=None, + content_length=None, errors_stream=None, multithread=False, + multiprocess=False, run_once=False, headers=None, data=None, + environ_base=None, environ_overrides=None, charset='utf-8'): + path_s = make_literal_wrapper(path) + if query_string is None and path_s('?') in path: + path, query_string = path.split(path_s('?'), 1) + self.charset = charset + self.path = iri_to_uri(path) + if base_url is not None: + base_url = url_fix(iri_to_uri(base_url, charset), charset) + self.base_url = base_url + if isinstance(query_string, (bytes, text_type)): + self.query_string = query_string + else: + if query_string is None: + query_string = MultiDict() + elif not isinstance(query_string, MultiDict): + query_string = MultiDict(query_string) + self.args = query_string + self.method = method + if headers is None: + headers = Headers() + elif not isinstance(headers, Headers): + headers = Headers(headers) + self.headers = headers + if content_type is not None: + self.content_type = content_type + if errors_stream is None: + errors_stream = sys.stderr + self.errors_stream = errors_stream + self.multithread = multithread + self.multiprocess = multiprocess + self.run_once = run_once + self.environ_base = environ_base + self.environ_overrides = environ_overrides + self.input_stream = input_stream + self.content_length = content_length + self.closed = False + + if data: + if input_stream is not None: + raise TypeError('can\'t provide input stream and data') + if isinstance(data, text_type): + data = data.encode(self.charset) + if isinstance(data, bytes): + self.input_stream = BytesIO(data) + if self.content_length is None: + self.content_length = len(data) + else: + for key, value in _iter_data(data): + if isinstance(value, (tuple, dict)) or 
\ + hasattr(value, 'read'): + self._add_file_from_data(key, value) + else: + self.form.setlistdefault(key).append(value) + + def _add_file_from_data(self, key, value): + """Called in the EnvironBuilder to add files from the data dict.""" + if isinstance(value, tuple): + self.files.add_file(key, *value) + elif isinstance(value, dict): + from warnings import warn + warn(DeprecationWarning('it\'s no longer possible to pass dicts ' + 'as `data`. Use tuples or FileStorage ' + 'objects instead'), stacklevel=2) + value = dict(value) + mimetype = value.pop('mimetype', None) + if mimetype is not None: + value['content_type'] = mimetype + self.files.add_file(key, **value) + else: + self.files.add_file(key, value) + + def _get_base_url(self): + return url_unparse((self.url_scheme, self.host, + self.script_root, '', '')).rstrip('/') + '/' + + def _set_base_url(self, value): + if value is None: + scheme = 'http' + netloc = 'localhost' + script_root = '' + else: + scheme, netloc, script_root, qs, anchor = url_parse(value) + if qs or anchor: + raise ValueError('base url must not contain a query string ' + 'or fragment') + self.script_root = script_root.rstrip('/') + self.host = netloc + self.url_scheme = scheme + + base_url = property(_get_base_url, _set_base_url, doc=''' + The base URL is a URL that is used to extract the WSGI + URL scheme, host (server name + server port) and the + script root (`SCRIPT_NAME`).''') + del _get_base_url, _set_base_url + + def _get_content_type(self): + ct = self.headers.get('Content-Type') + if ct is None and not self._input_stream: + if self._files: + return 'multipart/form-data' + elif self._form: + return 'application/x-www-form-urlencoded' + return None + return ct + + def _set_content_type(self, value): + if value is None: + self.headers.pop('Content-Type', None) + else: + self.headers['Content-Type'] = value + + content_type = property(_get_content_type, _set_content_type, doc=''' + The content type for the request. 
Reflected from and to the + :attr:`headers`. Do not set if you set :attr:`files` or + :attr:`form` for auto detection.''') + del _get_content_type, _set_content_type + + def _get_content_length(self): + return self.headers.get('Content-Length', type=int) + + def _set_content_length(self, value): + if value is None: + self.headers.pop('Content-Length', None) + else: + self.headers['Content-Length'] = str(value) + + content_length = property(_get_content_length, _set_content_length, doc=''' + The content length as integer. Reflected from and to the + :attr:`headers`. Do not set if you set :attr:`files` or + :attr:`form` for auto detection.''') + del _get_content_length, _set_content_length + + def form_property(name, storage, doc): + key = '_' + name + + def getter(self): + if self._input_stream is not None: + raise AttributeError('an input stream is defined') + rv = getattr(self, key) + if rv is None: + rv = storage() + setattr(self, key, rv) + + return rv + + def setter(self, value): + self._input_stream = None + setattr(self, key, value) + return property(getter, setter, doc) + + form = form_property('form', MultiDict, doc=''' + A :class:`MultiDict` of form values.''') + files = form_property('files', FileMultiDict, doc=''' + A :class:`FileMultiDict` of uploaded files. You can use the + :meth:`~FileMultiDict.add_file` method to add new files to the + dict.''') + del form_property + + def _get_input_stream(self): + return self._input_stream + + def _set_input_stream(self, value): + self._input_stream = value + self._form = self._files = None + + input_stream = property(_get_input_stream, _set_input_stream, doc=''' + An optional input stream. 
If you set this it will clear + :attr:`form` and :attr:`files`.''') + del _get_input_stream, _set_input_stream + + def _get_query_string(self): + if self._query_string is None: + if self._args is not None: + return url_encode(self._args, charset=self.charset) + return '' + return self._query_string + + def _set_query_string(self, value): + self._query_string = value + self._args = None + + query_string = property(_get_query_string, _set_query_string, doc=''' + The query string. If you set this to a string :attr:`args` will + no longer be available.''') + del _get_query_string, _set_query_string + + def _get_args(self): + if self._query_string is not None: + raise AttributeError('a query string is defined') + if self._args is None: + self._args = MultiDict() + return self._args + + def _set_args(self, value): + self._query_string = None + self._args = value + + args = property(_get_args, _set_args, doc=''' + The URL arguments as :class:`MultiDict`.''') + del _get_args, _set_args + + @property + def server_name(self): + """The server name (read-only, use :attr:`host` to set)""" + return self.host.split(':', 1)[0] + + @property + def server_port(self): + """The server port as integer (read-only, use :attr:`host` to set)""" + pieces = self.host.split(':', 1) + if len(pieces) == 2 and pieces[1].isdigit(): + return int(pieces[1]) + elif self.url_scheme == 'https': + return 443 + return 80 + + def __del__(self): + try: + self.close() + except Exception: + pass + + def close(self): + """Closes all files. If you put real :class:`file` objects into the + :attr:`files` dict you can call this method to automatically close + them all in one go. 
+ """ + if self.closed: + return + try: + files = itervalues(self.files) + except AttributeError: + files = () + for f in files: + try: + f.close() + except Exception: + pass + self.closed = True + + def get_environ(self): + """Return the built environ.""" + input_stream = self.input_stream + content_length = self.content_length + content_type = self.content_type + + if input_stream is not None: + start_pos = input_stream.tell() + input_stream.seek(0, 2) + end_pos = input_stream.tell() + input_stream.seek(start_pos) + content_length = end_pos - start_pos + elif content_type == 'multipart/form-data': + values = CombinedMultiDict([self.form, self.files]) + input_stream, content_length, boundary = \ + stream_encode_multipart(values, charset=self.charset) + content_type += '; boundary="%s"' % boundary + elif content_type == 'application/x-www-form-urlencoded': + # XXX: py2v3 review + values = url_encode(self.form, charset=self.charset) + values = values.encode('ascii') + content_length = len(values) + input_stream = BytesIO(values) + else: + input_stream = _empty_stream + + result = {} + if self.environ_base: + result.update(self.environ_base) + + def _path_encode(x): + return wsgi_encoding_dance(url_unquote(x, self.charset), self.charset) + + qs = wsgi_encoding_dance(self.query_string) + + result.update({ + 'REQUEST_METHOD': self.method, + 'SCRIPT_NAME': _path_encode(self.script_root), + 'PATH_INFO': _path_encode(self.path), + 'QUERY_STRING': qs, + 'SERVER_NAME': self.server_name, + 'SERVER_PORT': str(self.server_port), + 'HTTP_HOST': self.host, + 'SERVER_PROTOCOL': self.server_protocol, + 'CONTENT_TYPE': content_type or '', + 'CONTENT_LENGTH': str(content_length or '0'), + 'wsgi.version': self.wsgi_version, + 'wsgi.url_scheme': self.url_scheme, + 'wsgi.input': input_stream, + 'wsgi.errors': self.errors_stream, + 'wsgi.multithread': self.multithread, + 'wsgi.multiprocess': self.multiprocess, + 'wsgi.run_once': self.run_once + }) + for key, value in 
self.headers.to_wsgi_list(): + result['HTTP_%s' % key.upper().replace('-', '_')] = value + if self.environ_overrides: + result.update(self.environ_overrides) + return result + + def get_request(self, cls=None): + """Returns a request with the data. If the request class is not + specified :attr:`request_class` is used. + + :param cls: The request wrapper to use. + """ + if cls is None: + cls = self.request_class + return cls(self.get_environ()) + + +class ClientRedirectError(Exception): + + """ + If a redirect loop is detected when using follow_redirects=True with + the :cls:`Client`, then this exception is raised. + """ + + +class Client(object): + + """This class allows to send requests to a wrapped application. + + The response wrapper can be a class or factory function that takes + three arguments: app_iter, status and headers. The default response + wrapper just returns a tuple. + + Example:: + + class ClientResponse(BaseResponse): + ... + + client = Client(MyApplication(), response_wrapper=ClientResponse) + + The use_cookies parameter indicates whether cookies should be stored and + sent for subsequent requests. This is True by default, but passing False + will disable this behaviour. + + If you want to request some subdomain of your application you may set + `allow_subdomain_redirects` to `True` as if not no external redirects + are allowed. + + .. versionadded:: 0.5 + `use_cookies` is new in this version. Older versions did not provide + builtin cookie support. 
+ """ + + def __init__(self, application, response_wrapper=None, use_cookies=True, + allow_subdomain_redirects=False): + self.application = application + self.response_wrapper = response_wrapper + if use_cookies: + self.cookie_jar = _TestCookieJar() + else: + self.cookie_jar = None + self.allow_subdomain_redirects = allow_subdomain_redirects + + def set_cookie(self, server_name, key, value='', max_age=None, + expires=None, path='/', domain=None, secure=None, + httponly=False, charset='utf-8'): + """Sets a cookie in the client's cookie jar. The server name + is required and has to match the one that is also passed to + the open call. + """ + assert self.cookie_jar is not None, 'cookies disabled' + header = dump_cookie(key, value, max_age, expires, path, domain, + secure, httponly, charset) + environ = create_environ(path, base_url='http://' + server_name) + headers = [('Set-Cookie', header)] + self.cookie_jar.extract_wsgi(environ, headers) + + def delete_cookie(self, server_name, key, path='/', domain=None): + """Deletes a cookie in the test client.""" + self.set_cookie(server_name, key, expires=0, max_age=0, + path=path, domain=domain) + + def run_wsgi_app(self, environ, buffered=False): + """Runs the wrapped WSGI app with the given environment.""" + if self.cookie_jar is not None: + self.cookie_jar.inject_wsgi(environ) + rv = run_wsgi_app(self.application, environ, buffered=buffered) + if self.cookie_jar is not None: + self.cookie_jar.extract_wsgi(environ, rv[2]) + return rv + + def resolve_redirect(self, response, new_location, environ, buffered=False): + """Resolves a single redirect and triggers the request again + directly on this redirect client. 
+ """ + scheme, netloc, script_root, qs, anchor = url_parse(new_location) + base_url = url_unparse((scheme, netloc, '', '', '')).rstrip('/') + '/' + + cur_server_name = netloc.split(':', 1)[0].split('.') + real_server_name = get_host(environ).rsplit(':', 1)[0].split('.') + + if self.allow_subdomain_redirects: + allowed = cur_server_name[-len(real_server_name):] == real_server_name + else: + allowed = cur_server_name == real_server_name + + if not allowed: + raise RuntimeError('%r does not support redirect to ' + 'external targets' % self.__class__) + + status_code = int(response[1].split(None, 1)[0]) + if status_code == 307: + method = environ['REQUEST_METHOD'] + else: + method = 'GET' + + # For redirect handling we temporarily disable the response + # wrapper. This is not threadsafe but not a real concern + # since the test client must not be shared anyways. + old_response_wrapper = self.response_wrapper + self.response_wrapper = None + try: + return self.open(path=script_root, base_url=base_url, + query_string=qs, as_tuple=True, + buffered=buffered, method=method) + finally: + self.response_wrapper = old_response_wrapper + + def open(self, *args, **kwargs): + """Takes the same arguments as the :class:`EnvironBuilder` class with + some additions: You can provide a :class:`EnvironBuilder` or a WSGI + environment as only argument instead of the :class:`EnvironBuilder` + arguments and two optional keyword arguments (`as_tuple`, `buffered`) + that change the type of the return value or the way the application is + executed. + + .. versionchanged:: 0.5 + If a dict is provided as file in the dict for the `data` parameter + the content type has to be called `content_type` now instead of + `mimetype`. This change was made for consistency with + :class:`werkzeug.FileWrapper`. + + The `follow_redirects` parameter was added to :func:`open`. 
+ + Additional parameters: + + :param as_tuple: Returns a tuple in the form ``(environ, result)`` + :param buffered: Set this to True to buffer the application run. + This will automatically close the application for + you as well. + :param follow_redirects: Set this to True if the `Client` should + follow HTTP redirects. + """ + as_tuple = kwargs.pop('as_tuple', False) + buffered = kwargs.pop('buffered', False) + follow_redirects = kwargs.pop('follow_redirects', False) + environ = None + if not kwargs and len(args) == 1: + if isinstance(args[0], EnvironBuilder): + environ = args[0].get_environ() + elif isinstance(args[0], dict): + environ = args[0] + if environ is None: + builder = EnvironBuilder(*args, **kwargs) + try: + environ = builder.get_environ() + finally: + builder.close() + + response = self.run_wsgi_app(environ, buffered=buffered) + + # handle redirects + redirect_chain = [] + while 1: + status_code = int(response[1].split(None, 1)[0]) + if status_code not in (301, 302, 303, 305, 307) \ + or not follow_redirects: + break + new_location = response[2]['location'] + new_redirect_entry = (new_location, status_code) + if new_redirect_entry in redirect_chain: + raise ClientRedirectError('loop detected') + redirect_chain.append(new_redirect_entry) + environ, response = self.resolve_redirect(response, new_location, + environ, + buffered=buffered) + + if self.response_wrapper is not None: + response = self.response_wrapper(*response) + if as_tuple: + return environ, response + return response + + def get(self, *args, **kw): + """Like open but method is enforced to GET.""" + kw['method'] = 'GET' + return self.open(*args, **kw) + + def patch(self, *args, **kw): + """Like open but method is enforced to PATCH.""" + kw['method'] = 'PATCH' + return self.open(*args, **kw) + + def post(self, *args, **kw): + """Like open but method is enforced to POST.""" + kw['method'] = 'POST' + return self.open(*args, **kw) + + def head(self, *args, **kw): + """Like open but method is 
enforced to HEAD.""" + kw['method'] = 'HEAD' + return self.open(*args, **kw) + + def put(self, *args, **kw): + """Like open but method is enforced to PUT.""" + kw['method'] = 'PUT' + return self.open(*args, **kw) + + def delete(self, *args, **kw): + """Like open but method is enforced to DELETE.""" + kw['method'] = 'DELETE' + return self.open(*args, **kw) + + def options(self, *args, **kw): + """Like open but method is enforced to OPTIONS.""" + kw['method'] = 'OPTIONS' + return self.open(*args, **kw) + + def trace(self, *args, **kw): + """Like open but method is enforced to TRACE.""" + kw['method'] = 'TRACE' + return self.open(*args, **kw) + + def __repr__(self): + return '<%s %r>' % ( + self.__class__.__name__, + self.application + ) + + +def create_environ(*args, **kwargs): + """Create a new WSGI environ dict based on the values passed. The first + parameter should be the path of the request which defaults to '/'. The + second one can either be an absolute path (in that case the host is + localhost:80) or a full path to the request with scheme, netloc port and + the path to the script. + + This accepts the same arguments as the :class:`EnvironBuilder` + constructor. + + .. versionchanged:: 0.5 + This function is now a thin wrapper over :class:`EnvironBuilder` which + was added in 0.5. The `headers`, `environ_base`, `environ_overrides` + and `charset` parameters were added. + """ + builder = EnvironBuilder(*args, **kwargs) + try: + return builder.get_environ() + finally: + builder.close() + + +def run_wsgi_app(app, environ, buffered=False): + """Return a tuple in the form (app_iter, status, headers) of the + application output. This works best if you pass it an application that + returns an iterator all the time. + + Sometimes applications may use the `write()` callable returned + by the `start_response` function. This tries to resolve such edge + cases automatically. 
But if you don't get the expected output you + should set `buffered` to `True` which enforces buffering. + + If passed an invalid WSGI application the behavior of this function is + undefined. Never pass non-conforming WSGI applications to this function. + + :param app: the application to execute. + :param buffered: set to `True` to enforce buffering. + :return: tuple in the form ``(app_iter, status, headers)`` + """ + environ = _get_environ(environ) + response = [] + buffer = [] + + def start_response(status, headers, exc_info=None): + if exc_info is not None: + reraise(*exc_info) + response[:] = [status, headers] + return buffer.append + + app_rv = app(environ, start_response) + close_func = getattr(app_rv, 'close', None) + app_iter = iter(app_rv) + + # when buffering we emit the close call early and convert the + # application iterator into a regular list + if buffered: + try: + app_iter = list(app_iter) + finally: + if close_func is not None: + close_func() + + # otherwise we iterate the application iter until we have a response, chain + # the already received data with the already collected data and wrap it in + # a new `ClosingIterator` if we need to restore a `close` callable from the + # original return value. + else: + while not response: + buffer.append(next(app_iter)) + if buffer: + app_iter = chain(buffer, app_iter) + if close_func is not None and app_iter is not app_rv: + app_iter = ClosingIterator(app_iter, close_func) + + return app_iter, response[0], Headers(response[1]) diff --git a/deps/werkzeug/testapp.py b/deps/werkzeug/testapp.py new file mode 100644 index 00000000..595555a0 --- /dev/null +++ b/deps/werkzeug/testapp.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.testapp + ~~~~~~~~~~~~~~~~ + + Provide a small test application that can be used to test a WSGI server + and check it for WSGI compliance. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. 
+""" +import os +import sys +import werkzeug +from textwrap import wrap +from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response +from werkzeug.utils import escape +import base64 + +logo = Response(base64.b64decode(''' +R0lGODlhoACgAOMIAAEDACwpAEpCAGdgAJaKAM28AOnVAP3rAP///////// +//////////////////////yH5BAEKAAgALAAAAACgAKAAAAT+EMlJq704680R+F0ojmRpnuj0rWnrv +nB8rbRs33gu0bzu/0AObxgsGn3D5HHJbCUFyqZ0ukkSDlAidctNFg7gbI9LZlrBaHGtzAae0eloe25 +7w9EDOX2fst/xenyCIn5/gFqDiVVDV4aGeYiKkhSFjnCQY5OTlZaXgZp8nJ2ekaB0SQOjqphrpnOiq +ncEn65UsLGytLVmQ6m4sQazpbtLqL/HwpnER8bHyLrLOc3Oz8PRONPU1crXN9na263dMt/g4SzjMeX +m5yDpLqgG7OzJ4u8lT/P69ej3JPn69kHzN2OIAHkB9RUYSFCFQYQJFTIkCDBiwoXWGnowaLEjRm7+G +p9A7Hhx4rUkAUaSLJlxHMqVMD/aSycSZkyTplCqtGnRAM5NQ1Ly5OmzZc6gO4d6DGAUKA+hSocWYAo +SlM6oUWX2O/o0KdaVU5vuSQLAa0ADwQgMEMB2AIECZhVSnTno6spgbtXmHcBUrQACcc2FrTrWS8wAf +78cMFBgwIBgbN+qvTt3ayikRBk7BoyGAGABAdYyfdzRQGV3l4coxrqQ84GpUBmrdR3xNIDUPAKDBSA +ADIGDhhqTZIWaDcrVX8EsbNzbkvCOxG8bN5w8ly9H8jyTJHC6DFndQydbguh2e/ctZJFXRxMAqqPVA +tQH5E64SPr1f0zz7sQYjAHg0In+JQ11+N2B0XXBeeYZgBZFx4tqBToiTCPv0YBgQv8JqA6BEf6RhXx +w1ENhRBnWV8ctEX4Ul2zc3aVGcQNC2KElyTDYyYUWvShdjDyMOGMuFjqnII45aogPhz/CodUHFwaDx +lTgsaOjNyhGWJQd+lFoAGk8ObghI0kawg+EV5blH3dr+digkYuAGSaQZFHFz2P/cTaLmhF52QeSb45 +Jwxd+uSVGHlqOZpOeJpCFZ5J+rkAkFjQ0N1tah7JJSZUFNsrkeJUJMIBi8jyaEKIhKPomnC91Uo+NB +yyaJ5umnnpInIFh4t6ZSpGaAVmizqjpByDegYl8tPE0phCYrhcMWSv+uAqHfgH88ak5UXZmlKLVJhd +dj78s1Fxnzo6yUCrV6rrDOkluG+QzCAUTbCwf9SrmMLzK6p+OPHx7DF+bsfMRq7Ec61Av9i6GLw23r +idnZ+/OO0a99pbIrJkproCQMA17OPG6suq3cca5ruDfXCCDoS7BEdvmJn5otdqscn+uogRHHXs8cbh +EIfYaDY1AkrC0cqwcZpnM6ludx72x0p7Fo/hZAcpJDjax0UdHavMKAbiKltMWCF3xxh9k25N/Viud8 +ba78iCvUkt+V6BpwMlErmcgc502x+u1nSxJSJP9Mi52awD1V4yB/QHONsnU3L+A/zR4VL/indx/y64 +gqcj+qgTeweM86f0Qy1QVbvmWH1D9h+alqg254QD8HJXHvjQaGOqEqC22M54PcftZVKVSQG9jhkv7C +JyTyDoAJfPdu8v7DRZAxsP/ky9MJ3OL36DJfCFPASC3/aXlfLOOON9vGZZHydGf8LnxYJuuVIbl83y +Az5n/RPz07E+9+zw2A2ahz4HxHo9Kt79HTMx1Q7ma7zAzHgHqYH0SoZWyTuOLMiHwSfZDAQTn0ajk9 
+YQqodnUYjByQZhZak9Wu4gYQsMyEpIOAOQKze8CmEF45KuAHTvIDOfHJNipwoHMuGHBnJElUoDmAyX +c2Qm/R8Ah/iILCCJOEokGowdhDYc/yoL+vpRGwyVSCWFYZNljkhEirGXsalWcAgOdeAdoXcktF2udb +qbUhjWyMQxYO01o6KYKOr6iK3fE4MaS+DsvBsGOBaMb0Y6IxADaJhFICaOLmiWTlDAnY1KzDG4ambL +cWBA8mUzjJsN2KjSaSXGqMCVXYpYkj33mcIApyhQf6YqgeNAmNvuC0t4CsDbSshZJkCS1eNisKqlyG +cF8G2JeiDX6tO6Mv0SmjCa3MFb0bJaGPMU0X7c8XcpvMaOQmCajwSeY9G0WqbBmKv34DsMIEztU6Y2 +KiDlFdt6jnCSqx7Dmt6XnqSKaFFHNO5+FmODxMCWBEaco77lNDGXBM0ECYB/+s7nKFdwSF5hgXumQe +EZ7amRg39RHy3zIjyRCykQh8Zo2iviRKyTDn/zx6EefptJj2Cw+Ep2FSc01U5ry4KLPYsTyWnVGnvb +UpyGlhjBUljyjHhWpf8OFaXwhp9O4T1gU9UeyPPa8A2l0p1kNqPXEVRm1AOs1oAGZU596t6SOR2mcB +Oco1srWtkaVrMUzIErrKri85keKqRQYX9VX0/eAUK1hrSu6HMEX3Qh2sCh0q0D2CtnUqS4hj62sE/z +aDs2Sg7MBS6xnQeooc2R2tC9YrKpEi9pLXfYXp20tDCpSP8rKlrD4axprb9u1Df5hSbz9QU0cRpfgn +kiIzwKucd0wsEHlLpe5yHXuc6FrNelOl7pY2+11kTWx7VpRu97dXA3DO1vbkhcb4zyvERYajQgAADs +='''), mimetype='image/png') + + +TEMPLATE = u'''\ + +WSGI Information + +
+ +

WSGI Information

+

+ This page displays all available information about the WSGI server and + the underlying Python interpreter. +

Python Interpreter

+ + + + + + +
Python Version + %(python_version)s +
Platform + %(platform)s [%(os)s] +
API Version + %(api_version)s +
Byteorder + %(byteorder)s +
Werkzeug Version + %(werkzeug_version)s +
+

WSGI Environment

+ %(wsgi_env)s
+

Installed Eggs

+

+ The following python packages were installed on the system as + Python eggs: +

    %(python_eggs)s
+

System Path

+

+ The following paths are the current contents of the load path. The + following entries are looked up for Python packages. Note that not + all items in this path are folders. Gray and underlined items are + entries pointing to invalid resources or used by custom import hooks + such as the zip importer. +

+ Items with a bright background were expanded for display from a relative + path. If you encounter such paths in the output you might want to check + your setup as relative paths are usually problematic in multithreaded + environments. +

    %(sys_path)s
+
+''' + + +def iter_sys_path(): + if os.name == 'posix': + def strip(x): + prefix = os.path.expanduser('~') + if x.startswith(prefix): + x = '~' + x[len(prefix):] + return x + else: + strip = lambda x: x + + cwd = os.path.abspath(os.getcwd()) + for item in sys.path: + path = os.path.join(cwd, item or os.path.curdir) + yield strip(os.path.normpath(path)), \ + not os.path.isdir(path), path != item + + +def render_testapp(req): + try: + import pkg_resources + except ImportError: + eggs = () + else: + eggs = sorted(pkg_resources.working_set, + key=lambda x: x.project_name.lower()) + python_eggs = [] + for egg in eggs: + try: + version = egg.version + except (ValueError, AttributeError): + version = 'unknown' + python_eggs.append('
  • %s [%s]' % ( + escape(egg.project_name), + escape(version) + )) + + wsgi_env = [] + sorted_environ = sorted(req.environ.items(), + key=lambda x: repr(x[0]).lower()) + for key, value in sorted_environ: + wsgi_env.append('%s%s' % ( + escape(str(key)), + ' '.join(wrap(escape(repr(value)))) + )) + + sys_path = [] + for item, virtual, expanded in iter_sys_path(): + class_ = [] + if virtual: + class_.append('virtual') + if expanded: + class_.append('exp') + sys_path.append('%s' % ( + class_ and ' class="%s"' % ' '.join(class_) or '', + escape(item) + )) + + return (TEMPLATE % { + 'python_version': '
    '.join(escape(sys.version).splitlines()), + 'platform': escape(sys.platform), + 'os': escape(os.name), + 'api_version': sys.api_version, + 'byteorder': sys.byteorder, + 'werkzeug_version': werkzeug.__version__, + 'python_eggs': '\n'.join(python_eggs), + 'wsgi_env': '\n'.join(wsgi_env), + 'sys_path': '\n'.join(sys_path) + }).encode('utf-8') + + +def test_app(environ, start_response): + """Simple test application that dumps the environment. You can use + it to check if Werkzeug is working properly: + + .. sourcecode:: pycon + + >>> from werkzeug.serving import run_simple + >>> from werkzeug.testapp import test_app + >>> run_simple('localhost', 3000, test_app) + * Running on http://localhost:3000/ + + The application displays important information from the WSGI environment, + the Python interpreter and the installed libraries. + """ + req = Request(environ, populate_request=False) + if req.args.get('resource') == 'logo': + response = logo + else: + response = Response(render_testapp(req), mimetype='text/html') + return response(environ, start_response) + + +if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 5000, test_app, use_reloader=True) diff --git a/deps/werkzeug/urls.py b/deps/werkzeug/urls.py new file mode 100644 index 00000000..9d04a107 --- /dev/null +++ b/deps/werkzeug/urls.py @@ -0,0 +1,1004 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.urls + ~~~~~~~~~~~~~ + + ``werkzeug.urls`` used to provide several wrapper functions for Python 2 + urlparse, whose main purpose were to work around the behavior of the Py2 + stdlib and its lack of unicode support. While this was already a somewhat + inconvenient situation, it got even more complicated because Python 3's + ``urllib.parse`` actually does handle unicode properly. In other words, + this module would wrap two libraries with completely different behavior. 
So + now this module contains a 2-and-3-compatible backport of Python 3's + ``urllib.parse``, which is mostly API-compatible. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import os +import re +from werkzeug._compat import text_type, PY2, to_unicode, \ + to_native, implements_to_string, try_coerce_native, \ + normalize_string_tuple, make_literal_wrapper, \ + fix_tuple_repr +from werkzeug._internal import _encode_idna, _decode_idna +from werkzeug.datastructures import MultiDict, iter_multi_items +from collections import namedtuple + + +# A regular expression for what a valid schema looks like +_scheme_re = re.compile(r'^[a-zA-Z0-9+-.]+$') + +# Characters that are safe in any part of an URL. +_always_safe = (b'abcdefghijklmnopqrstuvwxyz' + b'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+') + +_hexdigits = '0123456789ABCDEFabcdef' +_hextobyte = dict( + ((a + b).encode(), int(a + b, 16)) + for a in _hexdigits for b in _hexdigits +) + + +_URLTuple = fix_tuple_repr(namedtuple( + '_URLTuple', + ['scheme', 'netloc', 'path', 'query', 'fragment'] +)) + + +class BaseURL(_URLTuple): + + '''Superclass of :py:class:`URL` and :py:class:`BytesURL`.''' + __slots__ = () + + def replace(self, **kwargs): + """Return an URL with the same values, except for those parameters + given new values by whichever keyword arguments are specified.""" + return self._replace(**kwargs) + + @property + def host(self): + """The host part of the URL if available, otherwise `None`. The + host is either the hostname or the IP address mentioned in the + URL. It will not contain the port. + """ + return self._split_host()[0] + + @property + def ascii_host(self): + """Works exactly like :attr:`host` but will return a result that + is restricted to ASCII. If it finds a netloc that is not ASCII + it will attempt to idna decode it. This is useful for socket + operations when the URL might include internationalized characters. 
+ """ + rv = self.host + if rv is not None and isinstance(rv, text_type): + try: + rv = _encode_idna(rv) + except UnicodeError: + rv = rv.encode('ascii', 'ignore') + return to_native(rv, 'ascii', 'ignore') + + @property + def port(self): + """The port in the URL as an integer if it was present, `None` + otherwise. This does not fill in default ports. + """ + try: + rv = int(to_native(self._split_host()[1])) + if 0 <= rv <= 65535: + return rv + except (ValueError, TypeError): + pass + + @property + def auth(self): + """The authentication part in the URL if available, `None` + otherwise. + """ + return self._split_netloc()[0] + + @property + def username(self): + """The username if it was part of the URL, `None` otherwise. + This undergoes URL decoding and will always be a unicode string. + """ + rv = self._split_auth()[0] + if rv is not None: + return _url_unquote_legacy(rv) + + @property + def raw_username(self): + """The username if it was part of the URL, `None` otherwise. + Unlike :attr:`username` this one is not being decoded. + """ + return self._split_auth()[0] + + @property + def password(self): + """The password if it was part of the URL, `None` otherwise. + This undergoes URL decoding and will always be a unicode string. + """ + rv = self._split_auth()[1] + if rv is not None: + return _url_unquote_legacy(rv) + + @property + def raw_password(self): + """The password if it was part of the URL, `None` otherwise. + Unlike :attr:`password` this one is not being decoded. + """ + return self._split_auth()[1] + + def decode_query(self, *args, **kwargs): + """Decodes the query part of the URL. Ths is a shortcut for + calling :func:`url_decode` on the query argument. The arguments and + keyword arguments are forwarded to :func:`url_decode` unchanged. + """ + return url_decode(self.query, *args, **kwargs) + + def join(self, *args, **kwargs): + """Joins this URL with another one. 
This is just a convenience + function for calling into :meth:`url_join` and then parsing the + return value again. + """ + return url_parse(url_join(self, *args, **kwargs)) + + def to_url(self): + """Returns a URL string or bytes depending on the type of the + information stored. This is just a convenience function + for calling :meth:`url_unparse` for this URL. + """ + return url_unparse(self) + + def decode_netloc(self): + """Decodes the netloc part into a string.""" + rv = _decode_idna(self.host or '') + + if ':' in rv: + rv = '[%s]' % rv + port = self.port + if port is not None: + rv = '%s:%d' % (rv, port) + auth = ':'.join(filter(None, [ + _url_unquote_legacy(self.raw_username or '', '/:%@'), + _url_unquote_legacy(self.raw_password or '', '/:%@'), + ])) + if auth: + rv = '%s@%s' % (auth, rv) + return rv + + def to_uri_tuple(self): + """Returns a :class:`BytesURL` tuple that holds a URI. This will + encode all the information in the URL properly to ASCII using the + rules a web browser would follow. + + It's usually more interesting to directly call :meth:`iri_to_uri` which + will return a string. + """ + return url_parse(iri_to_uri(self).encode('ascii')) + + def to_iri_tuple(self): + """Returns a :class:`URL` tuple that holds a IRI. This will try + to decode as much information as possible in the URL without + losing information similar to how a web browser does it for the + URL bar. + + It's usually more interesting to directly call :meth:`uri_to_iri` which + will return a string. + """ + return url_parse(uri_to_iri(self)) + + def get_file_location(self, pathformat=None): + """Returns a tuple with the location of the file in the form + ``(server, location)``. If the netloc is empty in the URL or + points to localhost, it's represented as ``None``. + + The `pathformat` by default is autodetection but needs to be set + when working with URLs of a specific system. 
The supported values + are ``'windows'`` when working with Windows or DOS paths and + ``'posix'`` when working with posix paths. + + If the URL does not point to to a local file, the server and location + are both represented as ``None``. + + :param pathformat: The expected format of the path component. + Currently ``'windows'`` and ``'posix'`` are + supported. Defaults to ``None`` which is + autodetect. + """ + if self.scheme != 'file': + return None, None + + path = url_unquote(self.path) + host = self.netloc or None + + if pathformat is None: + if os.name == 'nt': + pathformat = 'windows' + else: + pathformat = 'posix' + + if pathformat == 'windows': + if path[:1] == '/' and path[1:2].isalpha() and path[2:3] in '|:': + path = path[1:2] + ':' + path[3:] + windows_share = path[:3] in ('\\' * 3, '/' * 3) + import ntpath + path = ntpath.normpath(path) + # Windows shared drives are represented as ``\\host\\directory``. + # That results in a URL like ``file://///host/directory``, and a + # path like ``///host/directory``. We need to special-case this + # because the path contains the hostname. 
+ if windows_share and host is None: + parts = path.lstrip('\\').split('\\', 1) + if len(parts) == 2: + host, path = parts + else: + host = parts[0] + path = '' + elif pathformat == 'posix': + import posixpath + path = posixpath.normpath(path) + else: + raise TypeError('Invalid path format %s' % repr(pathformat)) + + if host in ('127.0.0.1', '::1', 'localhost'): + host = None + + return host, path + + def _split_netloc(self): + if self._at in self.netloc: + return self.netloc.split(self._at, 1) + return None, self.netloc + + def _split_auth(self): + auth = self._split_netloc()[0] + if not auth: + return None, None + if self._colon not in auth: + return auth, None + return auth.split(self._colon, 1) + + def _split_host(self): + rv = self._split_netloc()[1] + if not rv: + return None, None + + if not rv.startswith(self._lbracket): + if self._colon in rv: + return rv.split(self._colon, 1) + return rv, None + + idx = rv.find(self._rbracket) + if idx < 0: + return rv, None + + host = rv[1:idx] + rest = rv[idx + 1:] + if rest.startswith(self._colon): + return host, rest[1:] + return host, None + + +@implements_to_string +class URL(BaseURL): + + """Represents a parsed URL. This behaves like a regular tuple but + also has some extra attributes that give further insight into the + URL. + """ + __slots__ = () + _at = '@' + _colon = ':' + _lbracket = '[' + _rbracket = ']' + + def __str__(self): + return self.to_url() + + def encode_netloc(self): + """Encodes the netloc part to an ASCII safe URL as bytes.""" + rv = self.ascii_host or '' + if ':' in rv: + rv = '[%s]' % rv + port = self.port + if port is not None: + rv = '%s:%d' % (rv, port) + auth = ':'.join(filter(None, [ + url_quote(self.raw_username or '', 'utf-8', 'strict', '/:%'), + url_quote(self.raw_password or '', 'utf-8', 'strict', '/:%'), + ])) + if auth: + rv = '%s@%s' % (auth, rv) + return to_native(rv) + + def encode(self, charset='utf-8', errors='replace'): + """Encodes the URL to a tuple made out of bytes. 
The charset is + only being used for the path, query and fragment. + """ + return BytesURL( + self.scheme.encode('ascii'), + self.encode_netloc(), + self.path.encode(charset, errors), + self.query.encode(charset, errors), + self.fragment.encode(charset, errors) + ) + + +class BytesURL(BaseURL): + + """Represents a parsed URL in bytes.""" + __slots__ = () + _at = b'@' + _colon = b':' + _lbracket = b'[' + _rbracket = b']' + + def __str__(self): + return self.to_url().decode('utf-8', 'replace') + + def encode_netloc(self): + """Returns the netloc unchanged as bytes.""" + return self.netloc + + def decode(self, charset='utf-8', errors='replace'): + """Decodes the URL to a tuple made out of strings. The charset is + only being used for the path, query and fragment. + """ + return URL( + self.scheme.decode('ascii'), + self.decode_netloc(), + self.path.decode(charset, errors), + self.query.decode(charset, errors), + self.fragment.decode(charset, errors) + ) + + +def _unquote_to_bytes(string, unsafe=''): + if isinstance(string, text_type): + string = string.encode('utf-8') + if isinstance(unsafe, text_type): + unsafe = unsafe.encode('utf-8') + unsafe = frozenset(bytearray(unsafe)) + bits = iter(string.split(b'%')) + result = bytearray(next(bits, b'')) + for item in bits: + try: + char = _hextobyte[item[:2]] + if char in unsafe: + raise KeyError() + result.append(char) + result.extend(item[2:]) + except KeyError: + result.extend(b'%') + result.extend(item) + return bytes(result) + + +def _url_encode_impl(obj, charset, encode_keys, sort, key): + iterable = iter_multi_items(obj) + if sort: + iterable = sorted(iterable, key=key) + for key, value in iterable: + if value is None: + continue + if not isinstance(key, bytes): + key = text_type(key).encode(charset) + if not isinstance(value, bytes): + value = text_type(value).encode(charset) + yield url_quote_plus(key) + '=' + url_quote_plus(value) + + +def _url_unquote_legacy(value, unsafe=''): + try: + return url_unquote(value, 
charset='utf-8', + errors='strict', unsafe=unsafe) + except UnicodeError: + return url_unquote(value, charset='latin1', unsafe=unsafe) + + +def url_parse(url, scheme=None, allow_fragments=True): + """Parses a URL from a string into a :class:`URL` tuple. If the URL + is lacking a scheme it can be provided as second argument. Otherwise, + it is ignored. Optionally fragments can be stripped from the URL + by setting `allow_fragments` to `False`. + + The inverse of this function is :func:`url_unparse`. + + :param url: the URL to parse. + :param scheme: the default schema to use if the URL is schemaless. + :param allow_fragments: if set to `False` a fragment will be removed + from the URL. + """ + s = make_literal_wrapper(url) + is_text_based = isinstance(url, text_type) + + if scheme is None: + scheme = s('') + netloc = query = fragment = s('') + i = url.find(s(':')) + if i > 0 and _scheme_re.match(to_native(url[:i], errors='replace')): + # make sure "iri" is not actually a port number (in which case + # "scheme" is really part of the path) + rest = url[i + 1:] + if not rest or any(c not in s('0123456789') for c in rest): + # not a port number + scheme, url = url[:i].lower(), rest + + if url[:2] == s('//'): + delim = len(url) + for c in s('/?#'): + wdelim = url.find(c, 2) + if wdelim >= 0: + delim = min(delim, wdelim) + netloc, url = url[2:delim], url[delim:] + if (s('[') in netloc and s(']') not in netloc) or \ + (s(']') in netloc and s('[') not in netloc): + raise ValueError('Invalid IPv6 URL') + + if allow_fragments and s('#') in url: + url, fragment = url.split(s('#'), 1) + if s('?') in url: + url, query = url.split(s('?'), 1) + + result_type = is_text_based and URL or BytesURL + return result_type(scheme, netloc, url, query, fragment) + + +def url_quote(string, charset='utf-8', errors='strict', safe='/:', unsafe=''): + """URL encode a single string with a given encoding. + + :param s: the string to quote. + :param charset: the charset to be used. 
+ :param safe: an optional sequence of safe characters. + :param unsafe: an optional sequence of unsafe characters. + + .. versionadded:: 0.9.2 + The `unsafe` parameter was added. + """ + if not isinstance(string, (text_type, bytes, bytearray)): + string = text_type(string) + if isinstance(string, text_type): + string = string.encode(charset, errors) + if isinstance(safe, text_type): + safe = safe.encode(charset, errors) + if isinstance(unsafe, text_type): + unsafe = unsafe.encode(charset, errors) + safe = frozenset(bytearray(safe) + _always_safe) - frozenset(bytearray(unsafe)) + rv = bytearray() + for char in bytearray(string): + if char in safe: + rv.append(char) + else: + rv.extend(('%%%02X' % char).encode('ascii')) + return to_native(bytes(rv)) + + +def url_quote_plus(string, charset='utf-8', errors='strict', safe=''): + """URL encode a single string with the given encoding and convert + whitespace to "+". + + :param s: The string to quote. + :param charset: The charset to be used. + :param safe: An optional sequence of safe characters. + """ + return url_quote(string, charset, errors, safe + ' ', '+').replace(' ', '+') + + +def url_unparse(components): + """The reverse operation to :meth:`url_parse`. This accepts arbitrary + as well as :class:`URL` tuples and returns a URL as a string. + + :param components: the parsed URL as tuple which should be converted + into a URL string. + """ + scheme, netloc, path, query, fragment = \ + normalize_string_tuple(components) + s = make_literal_wrapper(scheme) + url = s('') + + # We generally treat file:///x and file:/x the same which is also + # what browsers seem to do. This also allows us to ignore a schema + # register for netloc utilization or having to differenciate between + # empty and missing netloc. 
+ if netloc or (scheme and path.startswith(s('/'))): + if path and path[:1] != s('/'): + path = s('/') + path + url = s('//') + (netloc or s('')) + path + elif path: + url += path + if scheme: + url = scheme + s(':') + url + if query: + url = url + s('?') + query + if fragment: + url = url + s('#') + fragment + return url + + +def url_unquote(string, charset='utf-8', errors='replace', unsafe=''): + """URL decode a single string with a given encoding. If the charset + is set to `None` no unicode decoding is performed and raw bytes + are returned. + + :param s: the string to unquote. + :param charset: the charset of the query string. If set to `None` + no unicode decoding will take place. + :param errors: the error handling for the charset decoding. + """ + rv = _unquote_to_bytes(string, unsafe) + if charset is not None: + rv = rv.decode(charset, errors) + return rv + + +def url_unquote_plus(s, charset='utf-8', errors='replace'): + """URL decode a single string with the given `charset` and decode "+" to + whitespace. + + Per default encoding errors are ignored. If you want a different behavior + you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a + :exc:`HTTPUnicodeError` is raised. + + :param s: The string to unquote. + :param charset: the charset of the query string. If set to `None` + no unicode decoding will take place. + :param errors: The error handling for the `charset` decoding. + """ + if isinstance(s, text_type): + s = s.replace(u'+', u' ') + else: + s = s.replace(b'+', b' ') + return url_unquote(s, charset, errors) + + +def url_fix(s, charset='utf-8'): + r"""Sometimes you get an URL by a user that just isn't a real URL because + it contains unsafe characters like ' ' and so on. 
This function can fix + some of the problems in a similar way browsers handle data entered by the + user: + + >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)') + 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)' + + :param s: the string with the URL to fix. + :param charset: The target charset for the URL if the url was given as + unicode string. + """ + # First step is to switch to unicode processing and to convert + # backslashes (which are invalid in URLs anyways) to slashes. This is + # consistent with what Chrome does. + s = to_unicode(s, charset, 'replace').replace('\\', '/') + + # For the specific case that we look like a malformed windows URL + # we want to fix this up manually: + if s.startswith('file://') and s[7:8].isalpha() and s[8:10] in (':/', '|/'): + s = 'file:///' + s[7:] + + url = url_parse(s) + path = url_quote(url.path, charset, safe='/%+$!*\'(),') + qs = url_quote_plus(url.query, charset, safe=':&%=+$!*\'(),') + anchor = url_quote_plus(url.fragment, charset, safe=':&%=+$!*\'(),') + return to_native(url_unparse((url.scheme, url.encode_netloc(), + path, qs, anchor))) + + +def uri_to_iri(uri, charset='utf-8', errors='replace'): + r""" + Converts a URI in a given charset to a IRI. + + Examples for URI versus IRI: + + >>> uri_to_iri(b'http://xn--n3h.net/') + u'http://\u2603.net/' + >>> uri_to_iri(b'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th') + u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th' + + Query strings are left unchanged: + + >>> uri_to_iri('/?foo=24&x=%26%2f') + u'/?foo=24&x=%26%2f' + + .. versionadded:: 0.6 + + :param uri: The URI to convert. + :param charset: The charset of the URI. + :param errors: The error handling on decode. 
+ """ + if isinstance(uri, tuple): + uri = url_unparse(uri) + uri = url_parse(to_unicode(uri, charset)) + path = url_unquote(uri.path, charset, errors, '%/;?') + query = url_unquote(uri.query, charset, errors, '%;/?:@&=+,$#') + fragment = url_unquote(uri.fragment, charset, errors, '%;/?:@&=+,$#') + return url_unparse((uri.scheme, uri.decode_netloc(), + path, query, fragment)) + + +def iri_to_uri(iri, charset='utf-8', errors='strict', safe_conversion=False): + r""" + Converts any unicode based IRI to an acceptable ASCII URI. Werkzeug always + uses utf-8 URLs internally because this is what browsers and HTTP do as + well. In some places where it accepts an URL it also accepts a unicode IRI + and converts it into a URI. + + Examples for IRI versus URI: + + >>> iri_to_uri(u'http://☃.net/') + 'http://xn--n3h.net/' + >>> iri_to_uri(u'http://üser:pässword@☃.net/påth') + 'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th' + + There is a general problem with IRI and URI conversion with some + protocols that appear in the wild that are in violation of the URI + specification. In places where Werkzeug goes through a forced IRI to + URI conversion it will set the `safe_conversion` flag which will + not perform a conversion if the end result is already ASCII. This + can mean that the return value is not an entirely correct URI but + it will not destroy such invalid URLs in the process. + + As an example consider the following two IRIs:: + + magnet:?xt=uri:whatever + itms-services://?action=download-manifest + + The internal representation after parsing of those URLs is the same + and there is no way to reconstruct the original one. If safe + conversion is enabled however this function becomes a noop for both of + those strings as they both can be considered URIs. + + .. versionadded:: 0.6 + + .. versionchanged:: 0.9.6 + The `safe_conversion` parameter was added. + + :param iri: The IRI to convert. + :param charset: The charset for the URI. 
+ :param safe_conversion: indicates if a safe conversion should take place. + For more information see the explanation above. + """ + if isinstance(iri, tuple): + iri = url_unparse(iri) + + if safe_conversion: + try: + native_iri = to_native(iri) + ascii_iri = to_native(iri).encode('ascii') + if ascii_iri.split() == [ascii_iri]: + return native_iri + except UnicodeError: + pass + + iri = url_parse(to_unicode(iri, charset, errors)) + + netloc = iri.encode_netloc() + path = url_quote(iri.path, charset, errors, '/:~+%') + query = url_quote(iri.query, charset, errors, '%&[]:;$*()+,!?*/=') + fragment = url_quote(iri.fragment, charset, errors, '=%&[]:;$()+,!?*/') + + return to_native(url_unparse((iri.scheme, netloc, + path, query, fragment))) + + +def url_decode(s, charset='utf-8', decode_keys=False, include_empty=True, + errors='replace', separator='&', cls=None): + """ + Parse a querystring and return it as :class:`MultiDict`. There is a + difference in key decoding on different Python versions. On Python 3 + keys will always be fully decoded whereas on Python 2, keys will + remain bytestrings if they fit into ASCII. On 2.x keys can be forced + to be unicode by setting `decode_keys` to `True`. + + If the charset is set to `None` no unicode decoding will happen and + raw bytes will be returned. + + Per default a missing value for a key will default to an empty key. If + you don't want that behavior you can set `include_empty` to `False`. + + Per default encoding errors are ignored. If you want a different behavior + you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a + `HTTPUnicodeError` is raised. + + .. versionchanged:: 0.5 + In previous versions ";" and "&" could be used for url decoding. + This changed in 0.5 where only "&" is supported. If you want to + use ";" instead a different `separator` can be provided. + + The `cls` parameter was added. + + :param s: a string with the query string to decode. 
+ :param charset: the charset of the query string. If set to `None` + no unicode decoding will take place. + :param decode_keys: Used on Python 2.x to control whether keys should + be forced to be unicode objects. If set to `True` + then keys will be unicode in all cases. Otherwise, + they remain `str` if they fit into ASCII. + :param include_empty: Set to `False` if you don't want empty values to + appear in the dict. + :param errors: the decoding error behavior. + :param separator: the pair separator to be used, defaults to ``&`` + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + """ + if cls is None: + cls = MultiDict + if isinstance(s, text_type) and not isinstance(separator, text_type): + separator = separator.decode(charset or 'ascii') + elif isinstance(s, bytes) and not isinstance(separator, bytes): + separator = separator.encode(charset or 'ascii') + return cls(_url_decode_impl(s.split(separator), charset, decode_keys, + include_empty, errors)) + + +def url_decode_stream(stream, charset='utf-8', decode_keys=False, + include_empty=True, errors='replace', separator='&', + cls=None, limit=None, return_iterator=False): + """Works like :func:`url_decode` but decodes a stream. The behavior + of stream and limit follows functions like + :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is + directly fed to the `cls` so you can consume the data while it's + parsed. + + .. versionadded:: 0.8 + + :param stream: a stream with the encoded querystring + :param charset: the charset of the query string. If set to `None` + no unicode decoding will take place. + :param decode_keys: Used on Python 2.x to control whether keys should + be forced to be unicode objects. If set to `True`, + keys will be unicode in all cases. Otherwise, they + remain `str` if they fit into ASCII. + :param include_empty: Set to `False` if you don't want empty values to + appear in the dict. 
+ :param errors: the decoding error behavior. + :param separator: the pair separator to be used, defaults to ``&`` + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param limit: the content length of the URL data. Not necessary if + a limited stream is provided. + :param return_iterator: if set to `True` the `cls` argument is ignored + and an iterator over all decoded pairs is + returned + """ + from werkzeug.wsgi import make_chunk_iter + if return_iterator: + cls = lambda x: x + elif cls is None: + cls = MultiDict + pair_iter = make_chunk_iter(stream, separator, limit) + return cls(_url_decode_impl(pair_iter, charset, decode_keys, + include_empty, errors)) + + +def _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors): + for pair in pair_iter: + if not pair: + continue + s = make_literal_wrapper(pair) + equal = s('=') + if equal in pair: + key, value = pair.split(equal, 1) + else: + if not include_empty: + continue + key = pair + value = s('') + key = url_unquote_plus(key, charset, errors) + if charset is not None and PY2 and not decode_keys: + key = try_coerce_native(key) + yield key, url_unquote_plus(value, charset, errors) + + +def url_encode(obj, charset='utf-8', encode_keys=False, sort=False, key=None, + separator=b'&'): + """URL encode a dict/`MultiDict`. If a value is `None` it will not appear + in the result string. Per default only values are encoded into the target + charset strings. If `encode_keys` is set to ``True`` unicode keys are + supported too. + + If `sort` is set to `True` the items are sorted by `key` or the default + sorting algorithm. + + .. versionadded:: 0.5 + `sort`, `key`, and `separator` were added. + + :param obj: the object to encode into a query string. + :param charset: the charset of the query string. + :param encode_keys: set to `True` if you have unicode keys. 
(Ignored on + Python 3.x) + :param sort: set to `True` if you want parameters to be sorted by `key`. + :param separator: the separator to be used for the pairs. + :param key: an optional function to be used for sorting. For more details + check out the :func:`sorted` documentation. + """ + separator = to_native(separator, 'ascii') + return separator.join(_url_encode_impl(obj, charset, encode_keys, sort, key)) + + +def url_encode_stream(obj, stream=None, charset='utf-8', encode_keys=False, + sort=False, key=None, separator=b'&'): + """Like :meth:`url_encode` but writes the results to a stream + object. If the stream is `None` a generator over all encoded + pairs is returned. + + .. versionadded:: 0.8 + + :param obj: the object to encode into a query string. + :param stream: a stream to write the encoded object into or `None` if + an iterator over the encoded pairs should be returned. In + that case the separator argument is ignored. + :param charset: the charset of the query string. + :param encode_keys: set to `True` if you have unicode keys. (Ignored on + Python 3.x) + :param sort: set to `True` if you want parameters to be sorted by `key`. + :param separator: the separator to be used for the pairs. + :param key: an optional function to be used for sorting. For more details + check out the :func:`sorted` documentation. + """ + separator = to_native(separator, 'ascii') + gen = _url_encode_impl(obj, charset, encode_keys, sort, key) + if stream is None: + return gen + for idx, chunk in enumerate(gen): + if idx: + stream.write(separator) + stream.write(chunk) + + +def url_join(base, url, allow_fragments=True): + """Join a base URL and a possibly relative URL to form an absolute + interpretation of the latter. + + :param base: the base URL for the join operation. + :param url: the URL to join. + :param allow_fragments: indicates whether fragments should be allowed. 
+ """ + if isinstance(base, tuple): + base = url_unparse(base) + if isinstance(url, tuple): + url = url_unparse(url) + + base, url = normalize_string_tuple((base, url)) + s = make_literal_wrapper(base) + + if not base: + return url + if not url: + return base + + bscheme, bnetloc, bpath, bquery, bfragment = \ + url_parse(base, allow_fragments=allow_fragments) + scheme, netloc, path, query, fragment = \ + url_parse(url, bscheme, allow_fragments) + if scheme != bscheme: + return url + if netloc: + return url_unparse((scheme, netloc, path, query, fragment)) + netloc = bnetloc + + if path[:1] == s('/'): + segments = path.split(s('/')) + elif not path: + segments = bpath.split(s('/')) + if not query: + query = bquery + else: + segments = bpath.split(s('/'))[:-1] + path.split(s('/')) + + # If the rightmost part is "./" we want to keep the slash but + # remove the dot. + if segments[-1] == s('.'): + segments[-1] = s('') + + # Resolve ".." and "." + segments = [segment for segment in segments if segment != s('.')] + while 1: + i = 1 + n = len(segments) - 1 + while i < n: + if segments[i] == s('..') and \ + segments[i - 1] not in (s(''), s('..')): + del segments[i - 1:i + 1] + break + i += 1 + else: + break + + # Remove trailing ".." if the URL is absolute + unwanted_marker = [s(''), s('..')] + while segments[:2] == unwanted_marker: + del segments[1] + + path = s('/').join(segments) + return url_unparse((scheme, netloc, path, query, fragment)) + + +class Href(object): + + """Implements a callable that constructs URLs with the given base. The + function can be called with any number of positional and keyword + arguments which than are used to assemble the URL. Works with URLs + and posix paths. 
+ + Positional arguments are appended as individual segments to + the path of the URL: + + >>> href = Href('/foo') + >>> href('bar', 23) + '/foo/bar/23' + >>> href('foo', bar=23) + '/foo/foo?bar=23' + + If any of the arguments (positional or keyword) evaluates to `None` it + will be skipped. If no keyword arguments are given the last argument + can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass), + otherwise the keyword arguments are used for the query parameters, cutting + off the first trailing underscore of the parameter name: + + >>> href(is_=42) + '/foo?is=42' + >>> href({'foo': 'bar'}) + '/foo?foo=bar' + + Combining of both methods is not allowed: + + >>> href({'foo': 'bar'}, bar=42) + Traceback (most recent call last): + ... + TypeError: keyword arguments and query-dicts can't be combined + + Accessing attributes on the href object creates a new href object with + the attribute name as prefix: + + >>> bar_href = href.bar + >>> bar_href("blub") + '/foo/bar/blub' + + If `sort` is set to `True` the items are sorted by `key` or the default + sorting algorithm: + + >>> href = Href("/", sort=True) + >>> href(a=1, b=2, c=3) + '/?a=1&b=2&c=3' + + .. versionadded:: 0.5 + `sort` and `key` were added. 
+ """ + + def __init__(self, base='./', charset='utf-8', sort=False, key=None): + if not base: + base = './' + self.base = base + self.charset = charset + self.sort = sort + self.key = key + + def __getattr__(self, name): + if name[:2] == '__': + raise AttributeError(name) + base = self.base + if base[-1:] != '/': + base += '/' + return Href(url_join(base, name), self.charset, self.sort, self.key) + + def __call__(self, *path, **query): + if path and isinstance(path[-1], dict): + if query: + raise TypeError('keyword arguments and query-dicts ' + 'can\'t be combined') + query, path = path[-1], path[:-1] + elif query: + query = dict([(k.endswith('_') and k[:-1] or k, v) + for k, v in query.items()]) + path = '/'.join([to_unicode(url_quote(x, self.charset), 'ascii') + for x in path if x is not None]).lstrip('/') + rv = self.base + if path: + if not rv.endswith('/'): + rv += '/' + rv = url_join(rv, './' + path) + if query: + rv += '?' + to_unicode(url_encode(query, self.charset, sort=self.sort, + key=self.key), 'ascii') + return to_native(rv) diff --git a/deps/werkzeug/useragents.py b/deps/werkzeug/useragents.py new file mode 100644 index 00000000..df427441 --- /dev/null +++ b/deps/werkzeug/useragents.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.useragents + ~~~~~~~~~~~~~~~~~~~ + + This module provides a helper to inspect user agent strings. This module + is far from complete but should work for most of the currently available + browsers. + + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import re + + +class UserAgentParser(object): + + """A simple user agent parser. 
Used by the `UserAgent`.""" + + platforms = ( + ('cros', 'chromeos'), + ('iphone|ios', 'iphone'), + ('ipad', 'ipad'), + (r'darwin|mac|os\s*x', 'macos'), + ('win', 'windows'), + (r'android', 'android'), + (r'x11|lin(\b|ux)?', 'linux'), + ('(sun|i86)os', 'solaris'), + (r'nintendo\s+wii', 'wii'), + ('irix', 'irix'), + ('hp-?ux', 'hpux'), + ('aix', 'aix'), + ('sco|unix_sv', 'sco'), + ('bsd', 'bsd'), + ('amiga', 'amiga'), + ('blackberry|playbook', 'blackberry'), + ('symbian', 'symbian') + ) + browsers = ( + ('googlebot', 'google'), + ('msnbot', 'msn'), + ('yahoo', 'yahoo'), + ('ask jeeves', 'ask'), + (r'aol|america\s+online\s+browser', 'aol'), + ('opera', 'opera'), + ('chrome', 'chrome'), + ('firefox|firebird|phoenix|iceweasel', 'firefox'), + ('galeon', 'galeon'), + ('safari|version', 'safari'), + ('webkit', 'webkit'), + ('camino', 'camino'), + ('konqueror', 'konqueror'), + ('k-meleon', 'kmeleon'), + ('netscape', 'netscape'), + (r'msie|microsoft\s+internet\s+explorer|trident/.+? rv:', 'msie'), + ('lynx', 'lynx'), + ('links', 'links'), + ('seamonkey|mozilla', 'seamonkey') + ) + + _browser_version_re = r'(?:%s)[/\sa-z(]*(\d+[.\da-z]+)?(?i)' + _language_re = re.compile( + r'(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|' + r'(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)' + ) + + def __init__(self): + self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platforms] + self.browsers = [(b, re.compile(self._browser_version_re % a)) + for a, b in self.browsers] + + def __call__(self, user_agent): + for platform, regex in self.platforms: + match = regex.search(user_agent) + if match is not None: + break + else: + platform = None + for browser, regex in self.browsers: + match = regex.search(user_agent) + if match is not None: + version = match.group(1) + break + else: + browser = version = None + match = self._language_re.search(user_agent) + if match is not None: + language = match.group(1) or match.group(2) + else: + language = None + return platform, browser, 
version, language + + +class UserAgent(object): + + """Represents a user agent. Pass it a WSGI environment or a user agent + string and you can inspect some of the details from the user agent + string via the attributes. The following attributes exist: + + .. attribute:: string + + the raw user agent string + + .. attribute:: platform + + the browser platform. The following platforms are currently + recognized: + + - `aix` + - `amiga` + - `android` + - `bsd` + - `chromeos` + - `hpux` + - `iphone` + - `ipad` + - `irix` + - `linux` + - `macos` + - `sco` + - `solaris` + - `wii` + - `windows` + + .. attribute:: browser + + the name of the browser. The following browsers are currently + recognized: + + - `aol` * + - `ask` * + - `camino` + - `chrome` + - `firefox` + - `galeon` + - `google` * + - `kmeleon` + - `konqueror` + - `links` + - `lynx` + - `msie` + - `msn` + - `netscape` + - `opera` + - `safari` + - `seamonkey` + - `webkit` + - `yahoo` * + + (Browsers maked with a star (``*``) are crawlers.) + + .. attribute:: version + + the version of the browser + + .. attribute:: language + + the language of the browser + """ + + _parser = UserAgentParser() + + def __init__(self, environ_or_string): + if isinstance(environ_or_string, dict): + environ_or_string = environ_or_string.get('HTTP_USER_AGENT', '') + self.string = environ_or_string + self.platform, self.browser, self.version, self.language = \ + self._parser(environ_or_string) + + def to_header(self): + return self.string + + def __str__(self): + return self.string + + def __nonzero__(self): + return bool(self.browser) + + __bool__ = __nonzero__ + + def __repr__(self): + return '<%s %r/%s>' % ( + self.__class__.__name__, + self.browser, + self.version + ) + + +# conceptionally this belongs in this module but because we want to lazily +# load the user agent module (which happens in wrappers.py) we have to import +# it afterwards. 
The class itself has the module set to this module so +# pickle, inspect and similar modules treat the object as if it was really +# implemented here. +from werkzeug.wrappers import UserAgentMixin # noqa diff --git a/deps/werkzeug/utils.py b/deps/werkzeug/utils.py new file mode 100644 index 00000000..935029e8 --- /dev/null +++ b/deps/werkzeug/utils.py @@ -0,0 +1,628 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.utils + ~~~~~~~~~~~~~~ + + This module implements various utilities for WSGI applications. Most of + them are used by the request and response wrappers but especially for + middleware development it makes sense to use them without the wrappers. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import re +import os +import sys +import pkgutil +try: + from html.entities import name2codepoint +except ImportError: + from htmlentitydefs import name2codepoint + +from werkzeug._compat import unichr, text_type, string_types, iteritems, \ + reraise, PY2 +from werkzeug._internal import _DictAccessorProperty, \ + _parse_signature, _missing + + +_format_re = re.compile(r'\$(?:(%s)|\{(%s)\})' % (('[a-zA-Z_][a-zA-Z0-9_]*',) * 2)) +_entity_re = re.compile(r'&([^;]+);') +_filename_ascii_strip_re = re.compile(r'[^A-Za-z0-9_.-]') +_windows_device_files = ('CON', 'AUX', 'COM1', 'COM2', 'COM3', 'COM4', 'LPT1', + 'LPT2', 'LPT3', 'PRN', 'NUL') + + +class cached_property(property): + + """A decorator that converts a function into a lazy property. The + function wrapped is called the first time to retrieve the result + and then that calculated result is used the next time you access + the value:: + + class Foo(object): + + @cached_property + def foo(self): + # calculate something important here + return 42 + + The class has to have a `__dict__` in order for this property to + work. 
+ """ + + # implementation detail: A subclass of python's builtin property + # decorator, we override __get__ to check for a cached value. If one + # choses to invoke __get__ by hand the property will still work as + # expected because the lookup logic is replicated in __get__ for + # manual invocation. + + def __init__(self, func, name=None, doc=None): + self.__name__ = name or func.__name__ + self.__module__ = func.__module__ + self.__doc__ = doc or func.__doc__ + self.func = func + + def __set__(self, obj, value): + obj.__dict__[self.__name__] = value + + def __get__(self, obj, type=None): + if obj is None: + return self + value = obj.__dict__.get(self.__name__, _missing) + if value is _missing: + value = self.func(obj) + obj.__dict__[self.__name__] = value + return value + + +class environ_property(_DictAccessorProperty): + + """Maps request attributes to environment variables. This works not only + for the Werzeug request object, but also any other class with an + environ attribute: + + >>> class Test(object): + ... environ = {'key': 'value'} + ... test = environ_property('key') + >>> var = Test() + >>> var.test + 'value' + + If you pass it a second value it's used as default if the key does not + exist, the third one can be a converter that takes a value and converts + it. If it raises :exc:`ValueError` or :exc:`TypeError` the default value + is used. If no default value is provided `None` is used. + + Per default the property is read only. You have to explicitly enable it + by passing ``read_only=False`` to the constructor. + """ + + read_only = True + + def lookup(self, obj): + return obj.environ + + +class header_property(_DictAccessorProperty): + + """Like `environ_property` but for headers.""" + + def lookup(self, obj): + return obj.headers + + +class HTMLBuilder(object): + + """Helper object for HTML generation. + + Per default there are two instances of that class. The `html` one, and + the `xhtml` one for those two dialects. 
The class uses keyword parameters + and positional parameters to generate small snippets of HTML. + + Keyword parameters are converted to XML/SGML attributes, positional + arguments are used as children. Because Python accepts positional + arguments before keyword arguments it's a good idea to use a list with the + star-syntax for some children: + + >>> html.p(class_='foo', *[html.a('foo', href='foo.html'), ' ', + ... html.a('bar', href='bar.html')]) + u'

    foo bar

    ' + + This class works around some browser limitations and can not be used for + arbitrary SGML/XML generation. For that purpose lxml and similar + libraries exist. + + Calling the builder escapes the string passed: + + >>> html.p(html("")) + u'

    <foo>

    ' + """ + + _entity_re = re.compile(r'&([^;]+);') + _entities = name2codepoint.copy() + _entities['apos'] = 39 + _empty_elements = set([ + 'area', 'base', 'basefont', 'br', 'col', 'command', 'embed', 'frame', + 'hr', 'img', 'input', 'keygen', 'isindex', 'link', 'meta', 'param', + 'source', 'wbr' + ]) + _boolean_attributes = set([ + 'selected', 'checked', 'compact', 'declare', 'defer', 'disabled', + 'ismap', 'multiple', 'nohref', 'noresize', 'noshade', 'nowrap' + ]) + _plaintext_elements = set(['textarea']) + _c_like_cdata = set(['script', 'style']) + + def __init__(self, dialect): + self._dialect = dialect + + def __call__(self, s): + return escape(s) + + def __getattr__(self, tag): + if tag[:2] == '__': + raise AttributeError(tag) + + def proxy(*children, **arguments): + buffer = '<' + tag + for key, value in iteritems(arguments): + if value is None: + continue + if key[-1] == '_': + key = key[:-1] + if key in self._boolean_attributes: + if not value: + continue + if self._dialect == 'xhtml': + value = '="' + key + '"' + else: + value = '' + else: + value = '="' + escape(value) + '"' + buffer += ' ' + key + value + if not children and tag in self._empty_elements: + if self._dialect == 'xhtml': + buffer += ' />' + else: + buffer += '>' + return buffer + buffer += '>' + + children_as_string = ''.join([text_type(x) for x in children + if x is not None]) + + if children_as_string: + if tag in self._plaintext_elements: + children_as_string = escape(children_as_string) + elif tag in self._c_like_cdata and self._dialect == 'xhtml': + children_as_string = '/**/' + buffer += children_as_string + '' + return buffer + return proxy + + def __repr__(self): + return '<%s for %r>' % ( + self.__class__.__name__, + self._dialect + ) + + +html = HTMLBuilder('html') +xhtml = HTMLBuilder('xhtml') + + +def get_content_type(mimetype, charset): + """Returns the full content type string with charset for a mimetype. 
+ + If the mimetype represents text the charset will be appended as charset + parameter, otherwise the mimetype is returned unchanged. + + :param mimetype: the mimetype to be used as content type. + :param charset: the charset to be appended in case it was a text mimetype. + :return: the content type. + """ + if mimetype.startswith('text/') or \ + mimetype == 'application/xml' or \ + (mimetype.startswith('application/') and + mimetype.endswith('+xml')): + mimetype += '; charset=' + charset + return mimetype + + +def format_string(string, context): + """String-template format a string: + + >>> format_string('$foo and ${foo}s', dict(foo=42)) + '42 and 42s' + + This does not do any attribute lookup etc. For more advanced string + formattings have a look at the `werkzeug.template` module. + + :param string: the format string. + :param context: a dict with the variables to insert. + """ + def lookup_arg(match): + x = context[match.group(1) or match.group(2)] + if not isinstance(x, string_types): + x = type(string)(x) + return x + return _format_re.sub(lookup_arg, string) + + +def secure_filename(filename): + r"""Pass it a filename and it will return a secure version of it. This + filename can then safely be stored on a regular file system and passed + to :func:`os.path.join`. The filename returned is an ASCII only string + for maximum portability. + + On windows systems the function also makes sure that the file is not + named after one of the special device files. + + >>> secure_filename("My cool movie.mov") + 'My_cool_movie.mov' + >>> secure_filename("../../../etc/passwd") + 'etc_passwd' + >>> secure_filename(u'i contain cool \xfcml\xe4uts.txt') + 'i_contain_cool_umlauts.txt' + + The function might return an empty filename. It's your responsibility + to ensure that the filename is unique and that you generate random + filename if the function returned an empty one. + + .. 
versionadded:: 0.5 + + :param filename: the filename to secure + """ + if isinstance(filename, text_type): + from unicodedata import normalize + filename = normalize('NFKD', filename).encode('ascii', 'ignore') + if not PY2: + filename = filename.decode('ascii') + for sep in os.path.sep, os.path.altsep: + if sep: + filename = filename.replace(sep, ' ') + filename = str(_filename_ascii_strip_re.sub('', '_'.join( + filename.split()))).strip('._') + + # on nt a couple of special files are present in each folder. We + # have to ensure that the target file is not such a filename. In + # this case we prepend an underline + if os.name == 'nt' and filename and \ + filename.split('.')[0].upper() in _windows_device_files: + filename = '_' + filename + + return filename + + +def escape(s, quote=None): + """Replace special characters "&", "<", ">" and (") to HTML-safe sequences. + + There is a special handling for `None` which escapes to an empty string. + + .. versionchanged:: 0.9 + `quote` is now implicitly on. + + :param s: the string to escape. + :param quote: ignored. + """ + if s is None: + return '' + elif hasattr(s, '__html__'): + return text_type(s.__html__()) + elif not isinstance(s, string_types): + s = text_type(s) + if quote is not None: + from warnings import warn + warn(DeprecationWarning('quote parameter is implicit now'), stacklevel=2) + s = s.replace('&', '&').replace('<', '<') \ + .replace('>', '>').replace('"', """) + return s + + +def unescape(s): + """The reverse function of `escape`. This unescapes all the HTML + entities, not only the XML entities inserted by `escape`. + + :param s: the string to unescape. 
+ """ + def handle_match(m): + name = m.group(1) + if name in HTMLBuilder._entities: + return unichr(HTMLBuilder._entities[name]) + try: + if name[:2] in ('#x', '#X'): + return unichr(int(name[2:], 16)) + elif name.startswith('#'): + return unichr(int(name[1:])) + except ValueError: + pass + return u'' + return _entity_re.sub(handle_match, s) + + +def redirect(location, code=302, Response=None): + """Returns a response object (a WSGI application) that, if called, + redirects the client to the target location. Supported codes are 301, + 302, 303, 305, and 307. 300 is not supported because it's not a real + redirect and 304 because it's the answer for a request with a request + with defined If-Modified-Since headers. + + .. versionadded:: 0.6 + The location can now be a unicode string that is encoded using + the :func:`iri_to_uri` function. + + .. versionadded:: 0.10 + The class used for the Response object can now be passed in. + + :param location: the location the response should redirect to. + :param code: the redirect status code. defaults to 302. + :param class Response: a Response class to use when instantiating a + response. The default is :class:`werkzeug.wrappers.Response` if + unspecified. + """ + if Response is None: + from werkzeug.wrappers import Response + + display_location = escape(location) + if isinstance(location, text_type): + # Safe conversion is necessary here as we might redirect + # to a broken URI scheme (for instance itms-services). + from werkzeug.urls import iri_to_uri + location = iri_to_uri(location, safe_conversion=True) + response = Response( + '\n' + 'Redirecting...\n' + '

    Redirecting...

    \n' + '

    You should be redirected automatically to target URL: ' + '%s. If not click the link.' % + (escape(location), display_location), code, mimetype='text/html') + response.headers['Location'] = location + return response + + +def append_slash_redirect(environ, code=301): + """Redirects to the same URL but with a slash appended. The behavior + of this function is undefined if the path ends with a slash already. + + :param environ: the WSGI environment for the request that triggers + the redirect. + :param code: the status code for the redirect. + """ + new_path = environ['PATH_INFO'].strip('/') + '/' + query_string = environ.get('QUERY_STRING') + if query_string: + new_path += '?' + query_string + return redirect(new_path, code) + + +def import_string(import_name, silent=False): + """Imports an object based on a string. This is useful if you want to + use import paths as endpoints or something similar. An import path can + be specified either in dotted notation (``xml.sax.saxutils.escape``) + or with a colon as object delimiter (``xml.sax.saxutils:escape``). + + If `silent` is True the return value will be `None` if the import fails. + + :param import_name: the dotted name for the object to import. + :param silent: if set to `True` import errors are ignored and + `None` is returned instead. + :return: imported object + """ + # force the import name to automatically convert to strings + # __import__ is not able to handle unicode strings in the fromlist + # if the module is a package + import_name = str(import_name).replace(':', '.') + try: + try: + __import__(import_name) + except ImportError: + if '.' 
not in import_name: + raise + else: + return sys.modules[import_name] + + module_name, obj_name = import_name.rsplit('.', 1) + try: + module = __import__(module_name, None, None, [obj_name]) + except ImportError: + # support importing modules not yet set up by the parent module + # (or package for that matter) + module = import_string(module_name) + + try: + return getattr(module, obj_name) + except AttributeError as e: + raise ImportError(e) + + except ImportError as e: + if not silent: + reraise( + ImportStringError, + ImportStringError(import_name, e), + sys.exc_info()[2]) + + +def find_modules(import_path, include_packages=False, recursive=False): + """Finds all the modules below a package. This can be useful to + automatically import all views / controllers so that their metaclasses / + function decorators have a chance to register themselves on the + application. + + Packages are not returned unless `include_packages` is `True`. This can + also recursively list modules but in that case it will import all the + packages to get the correct load path of that module. + + :param import_name: the dotted name for the package to find child modules. + :param include_packages: set to `True` if packages should be returned, too. + :param recursive: set to `True` if recursion should happen. + :return: generator + """ + module = import_string(import_path) + path = getattr(module, '__path__', None) + if path is None: + raise ValueError('%r is not a package' % import_path) + basename = module.__name__ + '.' + for importer, modname, ispkg in pkgutil.iter_modules(path): + modname = basename + modname + if ispkg: + if include_packages: + yield modname + if recursive: + for item in find_modules(modname, include_packages, True): + yield item + else: + yield modname + + +def validate_arguments(func, args, kwargs, drop_extra=True): + """Checks if the function accepts the arguments and keyword arguments. 
+ Returns a new ``(args, kwargs)`` tuple that can safely be passed to + the function without causing a `TypeError` because the function signature + is incompatible. If `drop_extra` is set to `True` (which is the default) + any extra positional or keyword arguments are dropped automatically. + + The exception raised provides three attributes: + + `missing` + A set of argument names that the function expected but where + missing. + + `extra` + A dict of keyword arguments that the function can not handle but + where provided. + + `extra_positional` + A list of values that where given by positional argument but the + function cannot accept. + + This can be useful for decorators that forward user submitted data to + a view function:: + + from werkzeug.utils import ArgumentValidationError, validate_arguments + + def sanitize(f): + def proxy(request): + data = request.values.to_dict() + try: + args, kwargs = validate_arguments(f, (request,), data) + except ArgumentValidationError: + raise BadRequest('The browser failed to transmit all ' + 'the data expected.') + return f(*args, **kwargs) + return proxy + + :param func: the function the validation is performed against. + :param args: a tuple of positional arguments. + :param kwargs: a dict of keyword arguments. + :param drop_extra: set to `False` if you don't want extra arguments + to be silently dropped. + :return: tuple in the form ``(args, kwargs)``. + """ + parser = _parse_signature(func) + args, kwargs, missing, extra, extra_positional = parser(args, kwargs)[:5] + if missing: + raise ArgumentValidationError(tuple(missing)) + elif (extra or extra_positional) and not drop_extra: + raise ArgumentValidationError(None, extra, extra_positional) + return tuple(args), kwargs + + +def bind_arguments(func, args, kwargs): + """Bind the arguments provided into a dict. When passed a function, + a tuple of arguments and a dict of keyword arguments `bind_arguments` + returns a dict of names as the function would see it. 
This can be useful + to implement a cache decorator that uses the function arguments to build + the cache key based on the values of the arguments. + + :param func: the function the arguments should be bound for. + :param args: tuple of positional arguments. + :param kwargs: a dict of keyword arguments. + :return: a :class:`dict` of bound keyword arguments. + """ + args, kwargs, missing, extra, extra_positional, \ + arg_spec, vararg_var, kwarg_var = _parse_signature(func)(args, kwargs) + values = {} + for (name, has_default, default), value in zip(arg_spec, args): + values[name] = value + if vararg_var is not None: + values[vararg_var] = tuple(extra_positional) + elif extra_positional: + raise TypeError('too many positional arguments') + if kwarg_var is not None: + multikw = set(extra) & set([x[0] for x in arg_spec]) + if multikw: + raise TypeError('got multiple values for keyword argument ' + + repr(next(iter(multikw)))) + values[kwarg_var] = extra + elif extra: + raise TypeError('got unexpected keyword argument ' + + repr(next(iter(extra)))) + return values + + +class ArgumentValidationError(ValueError): + + """Raised if :func:`validate_arguments` fails to validate""" + + def __init__(self, missing=None, extra=None, extra_positional=None): + self.missing = set(missing or ()) + self.extra = extra or {} + self.extra_positional = extra_positional or [] + ValueError.__init__(self, 'function arguments invalid. (' + '%d missing, %d additional)' % ( + len(self.missing), + len(self.extra) + len(self.extra_positional) + )) + + +class ImportStringError(ImportError): + + """Provides information about a failed :func:`import_string` attempt.""" + + #: String in dotted notation that failed to be imported. + import_name = None + #: Wrapped exception. + exception = None + + def __init__(self, import_name, exception): + self.import_name = import_name + self.exception = exception + + msg = ( + 'import_string() failed for %r. 
Possible reasons are:\n\n' + '- missing __init__.py in a package;\n' + '- package or module path not included in sys.path;\n' + '- duplicated package or module name taking precedence in ' + 'sys.path;\n' + '- missing module, class, function or variable;\n\n' + 'Debugged import:\n\n%s\n\n' + 'Original exception:\n\n%s: %s') + + name = '' + tracked = [] + for part in import_name.replace(':', '.').split('.'): + name += (name and '.') + part + imported = import_string(name, silent=True) + if imported: + tracked.append((name, getattr(imported, '__file__', None))) + else: + track = ['- %r found in %r.' % (n, i) for n, i in tracked] + track.append('- %r not found.' % name) + msg = msg % (import_name, '\n'.join(track), + exception.__class__.__name__, str(exception)) + break + + ImportError.__init__(self, msg) + + def __repr__(self): + return '<%s(%r, %r)>' % (self.__class__.__name__, self.import_name, + self.exception) + + +# DEPRECATED +# these objects were previously in this module as well. we import +# them here for backwards compatibility with old pickles. +from werkzeug.datastructures import ( # noqa + MultiDict, CombinedMultiDict, Headers, EnvironHeaders) +from werkzeug.http import parse_cookie, dump_cookie # noqa diff --git a/deps/werkzeug/wrappers.py b/deps/werkzeug/wrappers.py new file mode 100644 index 00000000..2d596693 --- /dev/null +++ b/deps/werkzeug/wrappers.py @@ -0,0 +1,1848 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.wrappers + ~~~~~~~~~~~~~~~~~ + + The wrappers are simple request and response objects which you can + subclass to do whatever you want them to do. The request object contains + the information transmitted by the client (webbrowser) and the response + object contains all the information sent back to the browser. + + An important detail is that the request object is created with the WSGI + environ and will act as high-level proxy whereas the response object is an + actual WSGI application. 
+ + Like everything else in Werkzeug these objects will work correctly with + unicode data. Incoming form data parsed by the response object will be + decoded into an unicode object if possible and if it makes sense. + + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +from functools import update_wrapper +from datetime import datetime, timedelta + +from werkzeug.http import HTTP_STATUS_CODES, \ + parse_accept_header, parse_cache_control_header, parse_etags, \ + parse_date, generate_etag, is_resource_modified, unquote_etag, \ + quote_etag, parse_set_header, parse_authorization_header, \ + parse_www_authenticate_header, remove_entity_headers, \ + parse_options_header, dump_options_header, http_date, \ + parse_if_range_header, parse_cookie, dump_cookie, \ + parse_range_header, parse_content_range_header, dump_header +from werkzeug.urls import url_decode, iri_to_uri, url_join +from werkzeug.formparser import FormDataParser, default_stream_factory +from werkzeug.utils import cached_property, environ_property, \ + header_property, get_content_type +from werkzeug.wsgi import get_current_url, get_host, \ + ClosingIterator, get_input_stream, get_content_length +from werkzeug.datastructures import MultiDict, CombinedMultiDict, Headers, \ + EnvironHeaders, ImmutableMultiDict, ImmutableTypeConversionDict, \ + ImmutableList, MIMEAccept, CharsetAccept, LanguageAccept, \ + ResponseCacheControl, RequestCacheControl, CallbackDict, \ + ContentRange, iter_multi_items +from werkzeug._internal import _get_environ +from werkzeug._compat import to_bytes, string_types, text_type, \ + integer_types, wsgi_decoding_dance, wsgi_get_bytes, \ + to_unicode, to_native, BytesIO + + +def _run_wsgi_app(*args): + """This function replaces itself to ensure that the test module is not + imported unless required. DO NOT USE! 
+ """ + global _run_wsgi_app + from werkzeug.test import run_wsgi_app as _run_wsgi_app + return _run_wsgi_app(*args) + + +def _warn_if_string(iterable): + """Helper for the response objects to check if the iterable returned + to the WSGI server is not a string. + """ + if isinstance(iterable, string_types): + from warnings import warn + warn(Warning('response iterable was set to a string. This appears ' + 'to work but means that the server will send the ' + 'data to the client char, by char. This is almost ' + 'never intended behavior, use response.data to assign ' + 'strings to the response object.'), stacklevel=2) + + +def _assert_not_shallow(request): + if request.shallow: + raise RuntimeError('A shallow request tried to consume ' + 'form data. If you really want to do ' + 'that, set `shallow` to False.') + + +def _iter_encoded(iterable, charset): + for item in iterable: + if isinstance(item, text_type): + yield item.encode(charset) + else: + yield item + + +class BaseRequest(object): + + """Very basic request object. This does not implement advanced stuff like + entity tag parsing or cache controls. The request object is created with + the WSGI environment as first argument and will add itself to the WSGI + environment as ``'werkzeug.request'`` unless it's created with + `populate_request` set to False. + + There are a couple of mixins available that add additional functionality + to the request object, there is also a class called `Request` which + subclasses `BaseRequest` and all the important mixins. + + It's a good idea to create a custom subclass of the :class:`BaseRequest` + and add missing functionality either via mixins or direct implementation. + Here an example for such subclasses:: + + from werkzeug.wrappers import BaseRequest, ETagRequestMixin + + class Request(BaseRequest, ETagRequestMixin): + pass + + Request objects are **read only**. As of 0.5 modifications are not + allowed in any place. 
Unlike the lower level parsing functions the + request object will use immutable objects everywhere possible. + + Per default the request object will assume all the text data is `utf-8` + encoded. Please refer to `the unicode chapter `_ for more + details about customizing the behavior. + + Per default the request object will be added to the WSGI + environment as `werkzeug.request` to support the debugging system. + If you don't want that, set `populate_request` to `False`. + + If `shallow` is `True` the environment is initialized as shallow + object around the environ. Every operation that would modify the + environ in any way (such as consuming form data) raises an exception + unless the `shallow` attribute is explicitly set to `False`. This + is useful for middlewares where you don't want to consume the form + data by accident. A shallow request is not populated to the WSGI + environment. + + .. versionchanged:: 0.5 + read-only mode was enforced by using immutables classes for all + data. + """ + + #: the charset for the request, defaults to utf-8 + charset = 'utf-8' + + #: the error handling procedure for errors, defaults to 'replace' + encoding_errors = 'replace' + + #: the maximum content length. This is forwarded to the form data + #: parsing function (:func:`parse_form_data`). When set and the + #: :attr:`form` or :attr:`files` attribute is accessed and the + #: parsing fails because more than the specified value is transmitted + #: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. + #: + #: Have a look at :ref:`dealing-with-request-data` for more details. + #: + #: .. versionadded:: 0.5 + max_content_length = None + + #: the maximum form field size. This is forwarded to the form data + #: parsing function (:func:`parse_form_data`). 
When set and the + #: :attr:`form` or :attr:`files` attribute is accessed and the + #: data in memory for post data is longer than the specified value a + #: :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. + #: + #: Have a look at :ref:`dealing-with-request-data` for more details. + #: + #: .. versionadded:: 0.5 + max_form_memory_size = None + + #: the class to use for `args` and `form`. The default is an + #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports + #: multiple values per key. alternatively it makes sense to use an + #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which + #: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict` + #: which is the fastest but only remembers the last key. It is also + #: possible to use mutable structures, but this is not recommended. + #: + #: .. versionadded:: 0.6 + parameter_storage_class = ImmutableMultiDict + + #: the type to be used for list values from the incoming WSGI environment. + #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used + #: (for example for :attr:`access_list`). + #: + #: .. versionadded:: 0.6 + list_storage_class = ImmutableList + + #: the type to be used for dict values from the incoming WSGI environment. + #: By default an + #: :class:`~werkzeug.datastructures.ImmutableTypeConversionDict` is used + #: (for example for :attr:`cookies`). + #: + #: .. versionadded:: 0.6 + dict_storage_class = ImmutableTypeConversionDict + + #: The form data parser that shoud be used. Can be replaced to customize + #: the form date parsing. + form_data_parser_class = FormDataParser + + #: Optionally a list of hosts that is trusted by this request. By default + #: all hosts are trusted which means that whatever the client sends the + #: host is will be accepted. 
+ #: + #: This is the recommended setup as a webserver should manually be set up + #: to only route correct hosts to the application, and remove the + #: `X-Forwarded-Host` header if it is not being used (see + #: :func:`werkzeug.wsgi.get_host`). + #: + #: .. versionadded:: 0.9 + trusted_hosts = None + + #: Indicates whether the data descriptor should be allowed to read and + #: buffer up the input stream. By default it's enabled. + #: + #: .. versionadded:: 0.9 + disable_data_descriptor = False + + def __init__(self, environ, populate_request=True, shallow=False): + self.environ = environ + if populate_request and not shallow: + self.environ['werkzeug.request'] = self + self.shallow = shallow + + def __repr__(self): + # make sure the __repr__ even works if the request was created + # from an invalid WSGI environment. If we display the request + # in a debug session we don't want the repr to blow up. + args = [] + try: + args.append("'%s'" % to_native(self.url, self.url_charset)) + args.append('[%s]' % self.method) + except Exception: + args.append('(invalid WSGI environ)') + + return '<%s %s>' % ( + self.__class__.__name__, + ' '.join(args) + ) + + @property + def url_charset(self): + """The charset that is assumed for URLs. Defaults to the value + of :attr:`charset`. + + .. versionadded:: 0.6 + """ + return self.charset + + @classmethod + def from_values(cls, *args, **kwargs): + """Create a new request object based on the values provided. If + environ is given missing values are filled from there. This method is + useful for small scripts when you need to simulate a request from an URL. + Do not use this method for unittesting, there is a full featured client + object (:class:`Client`) that allows to create multipart requests, + support for cookies etc. + + This accepts the same options as the + :class:`~werkzeug.test.EnvironBuilder`. + + .. versionchanged:: 0.5 + This method now accepts the same arguments as + :class:`~werkzeug.test.EnvironBuilder`. 
Because of this the + `environ` parameter is now called `environ_overrides`. + + :return: request object + """ + from werkzeug.test import EnvironBuilder + charset = kwargs.pop('charset', cls.charset) + kwargs['charset'] = charset + builder = EnvironBuilder(*args, **kwargs) + try: + return builder.get_request(cls) + finally: + builder.close() + + @classmethod + def application(cls, f): + """Decorate a function as responder that accepts the request as first + argument. This works like the :func:`responder` decorator but the + function is passed the request object as first argument and the + request object will be closed automatically:: + + @Request.application + def my_wsgi_app(request): + return Response('Hello World!') + + :param f: the WSGI callable to decorate + :return: a new WSGI callable + """ + #: return a callable that wraps the -2nd argument with the request + #: and calls the function with all the arguments up to that one and + #: the request. The return value is then called with the latest + #: two arguments. This makes it possible to use this decorator for + #: both methods and standalone WSGI functions. + def application(*args): + request = cls(args[-2]) + with request: + return f(*args[:-2] + (request,))(*args[-2:]) + return update_wrapper(application, f) + + def _get_file_stream(self, total_content_length, content_type, filename=None, + content_length=None): + """Called to get a stream for the file upload. + + This must provide a file-like class with `read()`, `readline()` + and `seek()` methods that is both writeable and readable. + + The default implementation returns a temporary file if the total + content length is higher than 500KB. Because many browsers do not + provide a content length for the files only the total content + length matters. + + :param total_content_length: the total content length of all the + data in the request combined. This value + is guaranteed to be there. + :param content_type: the mimetype of the uploaded file. 
+ :param filename: the filename of the uploaded file. May be `None`. + :param content_length: the length of this file. This value is usually + not provided because webbrowsers do not provide + this value. + """ + return default_stream_factory(total_content_length, content_type, + filename, content_length) + + @property + def want_form_data_parsed(self): + """Returns True if the request method carries content. As of + Werkzeug 0.9 this will be the case if a content type is transmitted. + + .. versionadded:: 0.8 + """ + return bool(self.environ.get('CONTENT_TYPE')) + + def make_form_data_parser(self): + """Creates the form data parser. Instanciates the + :attr:`form_data_parser_class` with some parameters. + + .. versionadded:: 0.8 + """ + return self.form_data_parser_class(self._get_file_stream, + self.charset, + self.encoding_errors, + self.max_form_memory_size, + self.max_content_length, + self.parameter_storage_class) + + def _load_form_data(self): + """Method used internally to retrieve submitted data. After calling + this sets `form` and `files` on the request object to multi dicts + filled with the incoming form data. As a matter of fact the input + stream will be empty afterwards. You can also call this method to + force the parsing of the form data. + + .. versionadded:: 0.8 + """ + # abort early if we have already consumed the stream + if 'form' in self.__dict__: + return + + _assert_not_shallow(self) + + if self.want_form_data_parsed: + content_type = self.environ.get('CONTENT_TYPE', '') + content_length = get_content_length(self.environ) + mimetype, options = parse_options_header(content_type) + parser = self.make_form_data_parser() + data = parser.parse(self._get_stream_for_parsing(), + mimetype, content_length, options) + else: + data = (self.stream, self.parameter_storage_class(), + self.parameter_storage_class()) + + # inject the values into the instance dict so that we bypass + # our cached_property non-data descriptor. 
+ d = self.__dict__ + d['stream'], d['form'], d['files'] = data + + def _get_stream_for_parsing(self): + """This is the same as accessing :attr:`stream` with the difference + that if it finds cached data from calling :meth:`get_data` first it + will create a new stream out of the cached data. + + .. versionadded:: 0.9.3 + """ + cached_data = getattr(self, '_cached_data', None) + if cached_data is not None: + return BytesIO(cached_data) + return self.stream + + def close(self): + """Closes associated resources of this request object. This + closes all file handles explicitly. You can also use the request + object in a with statement which will automatically close it. + + .. versionadded:: 0.9 + """ + files = self.__dict__.get('files') + for key, value in iter_multi_items(files or ()): + value.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + self.close() + + @cached_property + def stream(self): + """The stream to read incoming data from. Unlike :attr:`input_stream` + this stream is properly guarded that you can't accidentally read past + the length of the input. Werkzeug will internally always refer to + this stream to read data which makes it possible to wrap this + object with a stream that does filtering. + + .. versionchanged:: 0.9 + This stream is now always available but might be consumed by the + form parser later on. Previously the stream was only set if no + parsing happened. + """ + _assert_not_shallow(self) + return get_input_stream(self.environ) + + input_stream = environ_property('wsgi.input', """ + The WSGI input stream. + + In general it's a bad idea to use this one because you can easily read past + the boundary. Use the :attr:`stream` instead. + """) + + @cached_property + def args(self): + """The parsed URL parameters. By default an + :class:`~werkzeug.datastructures.ImmutableMultiDict` + is returned from this function. 
This can be changed by setting + :attr:`parameter_storage_class` to a different type. This might + be necessary if the order of the form data is important. + """ + return url_decode(wsgi_get_bytes(self.environ.get('QUERY_STRING', '')), + self.url_charset, errors=self.encoding_errors, + cls=self.parameter_storage_class) + + @cached_property + def data(self): + if self.disable_data_descriptor: + raise AttributeError('data descriptor is disabled') + # XXX: this should eventually be deprecated. + + # We trigger form data parsing first which means that the descriptor + # will not cache the data that would otherwise be .form or .files + # data. This restores the behavior that was there in Werkzeug + # before 0.9. New code should use :meth:`get_data` explicitly as + # this will make behavior explicit. + return self.get_data(parse_form_data=True) + + def get_data(self, cache=True, as_text=False, parse_form_data=False): + """This reads the buffered incoming data from the client into one + bytestring. By default this is cached but that behavior can be + changed by setting `cache` to `False`. + + Usually it's a bad idea to call this method without checking the + content length first as a client could send dozens of megabytes or more + to cause memory problems on the server. + + Note that if the form data was already parsed this method will not + return anything as form data parsing does not cache the data like + this method does. To implicitly invoke form data parsing function + set `parse_form_data` to `True`. When this is done the return value + of this method will be an empty string if the form parser handles + the data. This generally is not necessary as if the whole data is + cached (which is the default) the form parser will used the cached + data to parse the form data. Please be generally aware of checking + the content length first in any case before calling this method + to avoid exhausting server memory. 
+ + If `as_text` is set to `True` the return value will be a decoded + unicode string. + + .. versionadded:: 0.9 + """ + rv = getattr(self, '_cached_data', None) + if rv is None: + if parse_form_data: + self._load_form_data() + rv = self.stream.read() + if cache: + self._cached_data = rv + if as_text: + rv = rv.decode(self.charset, self.encoding_errors) + return rv + + @cached_property + def form(self): + """The form parameters. By default an + :class:`~werkzeug.datastructures.ImmutableMultiDict` + is returned from this function. This can be changed by setting + :attr:`parameter_storage_class` to a different type. This might + be necessary if the order of the form data is important. + """ + self._load_form_data() + return self.form + + @cached_property + def values(self): + """Combined multi dict for :attr:`args` and :attr:`form`.""" + args = [] + for d in self.args, self.form: + if not isinstance(d, MultiDict): + d = MultiDict(d) + args.append(d) + return CombinedMultiDict(args) + + @cached_property + def files(self): + """:class:`~werkzeug.datastructures.MultiDict` object containing + all uploaded files. Each key in :attr:`files` is the name from the + ````. Each value in :attr:`files` is a + Werkzeug :class:`~werkzeug.datastructures.FileStorage` object. + + Note that :attr:`files` will only contain data if the request method was + POST, PUT or PATCH and the ``

    `` that posted to the request had + ``enctype="multipart/form-data"``. It will be empty otherwise. + + See the :class:`~werkzeug.datastructures.MultiDict` / + :class:`~werkzeug.datastructures.FileStorage` documentation for + more details about the used data structure. + """ + self._load_form_data() + return self.files + + @cached_property + def cookies(self): + """Read only access to the retrieved cookie values as dictionary.""" + return parse_cookie(self.environ, self.charset, + self.encoding_errors, + cls=self.dict_storage_class) + + @cached_property + def headers(self): + """The headers from the WSGI environ as immutable + :class:`~werkzeug.datastructures.EnvironHeaders`. + """ + return EnvironHeaders(self.environ) + + @cached_property + def path(self): + """Requested path as unicode. This works a bit like the regular path + info in the WSGI environment but will always include a leading slash, + even if the URL root is accessed. + """ + raw_path = wsgi_decoding_dance(self.environ.get('PATH_INFO') or '', + self.charset, self.encoding_errors) + return '/' + raw_path.lstrip('/') + + @cached_property + def full_path(self): + """Requested path as unicode, including the query string.""" + return self.path + u'?' + to_unicode(self.query_string, self.url_charset) + + @cached_property + def script_root(self): + """The root path of the script without the trailing slash.""" + raw_path = wsgi_decoding_dance(self.environ.get('SCRIPT_NAME') or '', + self.charset, self.encoding_errors) + return raw_path.rstrip('/') + + @cached_property + def url(self): + """The reconstructed current URL as IRI. + See also: :attr:`trusted_hosts`. + """ + return get_current_url(self.environ, + trusted_hosts=self.trusted_hosts) + + @cached_property + def base_url(self): + """Like :attr:`url` but without the querystring + See also: :attr:`trusted_hosts`. 
+ """ + return get_current_url(self.environ, strip_querystring=True, + trusted_hosts=self.trusted_hosts) + + @cached_property + def url_root(self): + """The full URL root (with hostname), this is the application + root as IRI. + See also: :attr:`trusted_hosts`. + """ + return get_current_url(self.environ, True, + trusted_hosts=self.trusted_hosts) + + @cached_property + def host_url(self): + """Just the host with scheme as IRI. + See also: :attr:`trusted_hosts`. + """ + return get_current_url(self.environ, host_only=True, + trusted_hosts=self.trusted_hosts) + + @cached_property + def host(self): + """Just the host including the port if available. + See also: :attr:`trusted_hosts`. + """ + return get_host(self.environ, trusted_hosts=self.trusted_hosts) + + query_string = environ_property( + 'QUERY_STRING', '', read_only=True, + load_func=wsgi_get_bytes, doc='The URL parameters as raw bytestring.') + method = environ_property( + 'REQUEST_METHOD', 'GET', read_only=True, + load_func=lambda x: x.upper(), + doc="The transmission method. (For example ``'GET'`` or ``'POST'``).") + + @cached_property + def access_route(self): + """If a forwarded header exists this is a list of all ip addresses + from the client ip to the last proxy server. + """ + if 'HTTP_X_FORWARDED_FOR' in self.environ: + addr = self.environ['HTTP_X_FORWARDED_FOR'].split(',') + return self.list_storage_class([x.strip() for x in addr]) + elif 'REMOTE_ADDR' in self.environ: + return self.list_storage_class([self.environ['REMOTE_ADDR']]) + return self.list_storage_class() + + @property + def remote_addr(self): + """The remote address of the client.""" + return self.environ.get('REMOTE_ADDR') + + remote_user = environ_property('REMOTE_USER', doc=''' + If the server supports user authentication, and the script is + protected, this attribute contains the username the user has + authenticated as.''') + + scheme = environ_property('wsgi.url_scheme', doc=''' + URL scheme (http or https). + + .. 
versionadded:: 0.7''') + + is_xhr = property(lambda x: x.environ.get('HTTP_X_REQUESTED_WITH', '') + .lower() == 'xmlhttprequest', doc=''' + True if the request was triggered via a JavaScript XMLHttpRequest. + This only works with libraries that support the `X-Requested-With` + header and set it to "XMLHttpRequest". Libraries that do that are + prototype, jQuery and Mochikit and probably some more.''') + is_secure = property(lambda x: x.environ['wsgi.url_scheme'] == 'https', + doc='`True` if the request is secure.') + is_multithread = environ_property('wsgi.multithread', doc=''' + boolean that is `True` if the application is served by + a multithreaded WSGI server.''') + is_multiprocess = environ_property('wsgi.multiprocess', doc=''' + boolean that is `True` if the application is served by + a WSGI server that spawns multiple processes.''') + is_run_once = environ_property('wsgi.run_once', doc=''' + boolean that is `True` if the application will be executed only + once in a process lifetime. This is the case for CGI for example, + but it's not guaranteed that the execution only happens one time.''') + + +class BaseResponse(object): + + """Base response class. The most important fact about a response object + is that it's a regular WSGI application. It's initialized with a couple + of response parameters (headers, body, status code etc.) and will start a + valid WSGI response when called with the environ and start response + callable. + + Because it's a WSGI application itself processing usually ends before the + actual response is sent to the server. This helps debugging systems + because they can catch all the exceptions before responses are started. 
+ + Here a small example WSGI application that takes advantage of the + response objects:: + + from werkzeug.wrappers import BaseResponse as Response + + def index(): + return Response('Index page') + + def application(environ, start_response): + path = environ.get('PATH_INFO') or '/' + if path == '/': + response = index() + else: + response = Response('Not Found', status=404) + return response(environ, start_response) + + Like :class:`BaseRequest` which object is lacking a lot of functionality + implemented in mixins. This gives you a better control about the actual + API of your response objects, so you can create subclasses and add custom + functionality. A full featured response object is available as + :class:`Response` which implements a couple of useful mixins. + + To enforce a new type of already existing responses you can use the + :meth:`force_type` method. This is useful if you're working with different + subclasses of response objects and you want to post process them with a + known interface. + + Per default the request object will assume all the text data is `utf-8` + encoded. Please refer to `the unicode chapter `_ for more + details about customizing the behavior. + + Response can be any kind of iterable or string. If it's a string it's + considered being an iterable with one item which is the string passed. + Headers can be a list of tuples or a + :class:`~werkzeug.datastructures.Headers` object. + + Special note for `mimetype` and `content_type`: For most mime types + `mimetype` and `content_type` work the same, the difference affects + only 'text' mimetypes. If the mimetype passed with `mimetype` is a + mimetype starting with `text/`, the charset parameter of the response + object is appended to it. In contrast the `content_type` parameter is + always added as header unmodified. + + .. versionchanged:: 0.5 + the `direct_passthrough` parameter was added. + + :param response: a string or response iterable. 
+ :param status: a string with a status or an integer with the status code. + :param headers: a list of headers or a + :class:`~werkzeug.datastructures.Headers` object. + :param mimetype: the mimetype for the request. See notice above. + :param content_type: the content type for the request. See notice above. + :param direct_passthrough: if set to `True` :meth:`iter_encoded` is not + called before iteration which makes it + possible to pass special iterators through + unchanged (see :func:`wrap_file` for more + details.) + """ + + #: the charset of the response. + charset = 'utf-8' + + #: the default status if none is provided. + default_status = 200 + + #: the default mimetype if none is provided. + default_mimetype = 'text/plain' + + #: if set to `False` accessing properties on the response object will + #: not try to consume the response iterator and convert it into a list. + #: + #: .. versionadded:: 0.6.2 + #: + #: That attribute was previously called `implicit_seqence_conversion`. + #: (Notice the typo). If you did use this feature, you have to adapt + #: your code to the name change. + implicit_sequence_conversion = True + + #: Should this response object correct the location header to be RFC + #: conformant? This is true by default. + #: + #: .. versionadded:: 0.8 + autocorrect_location_header = True + + #: Should this response object automatically set the content-length + #: header if possible? This is true by default. + #: + #: .. 
versionadded:: 0.8 + automatically_set_content_length = True + + def __init__(self, response=None, status=None, headers=None, + mimetype=None, content_type=None, direct_passthrough=False): + if isinstance(headers, Headers): + self.headers = headers + elif not headers: + self.headers = Headers() + else: + self.headers = Headers(headers) + + if content_type is None: + if mimetype is None and 'content-type' not in self.headers: + mimetype = self.default_mimetype + if mimetype is not None: + mimetype = get_content_type(mimetype, self.charset) + content_type = mimetype + if content_type is not None: + self.headers['Content-Type'] = content_type + if status is None: + status = self.default_status + if isinstance(status, integer_types): + self.status_code = status + else: + self.status = status + + self.direct_passthrough = direct_passthrough + self._on_close = [] + + # we set the response after the headers so that if a class changes + # the charset attribute, the data is set in the correct charset. + if response is None: + self.response = [] + elif isinstance(response, (text_type, bytes, bytearray)): + self.set_data(response) + else: + self.response = response + + def call_on_close(self, func): + """Adds a function to the internal list of functions that should + be called as part of closing down the response. Since 0.7 this + function also returns the function that was passed so that this + can be used as a decorator. + + .. versionadded:: 0.6 + """ + self._on_close.append(func) + return func + + def __repr__(self): + if self.is_sequence: + body_info = '%d bytes' % sum(map(len, self.iter_encoded())) + else: + body_info = 'streamed' if self.is_streamed else 'likely-streamed' + return '<%s %s [%s]>' % ( + self.__class__.__name__, + body_info, + self.status + ) + + @classmethod + def force_type(cls, response, environ=None): + """Enforce that the WSGI response is a response object of the current + type. 
Werkzeug will use the :class:`BaseResponse` internally in many + situations like the exceptions. If you call :meth:`get_response` on an + exception you will get back a regular :class:`BaseResponse` object, even + if you are using a custom subclass. + + This method can enforce a given response type, and it will also + convert arbitrary WSGI callables into response objects if an environ + is provided:: + + # convert a Werkzeug response object into an instance of the + # MyResponseClass subclass. + response = MyResponseClass.force_type(response) + + # convert any WSGI application into a response object + response = MyResponseClass.force_type(response, environ) + + This is especially useful if you want to post-process responses in + the main dispatcher and use functionality provided by your subclass. + + Keep in mind that this will modify response objects in place if + possible! + + :param response: a response object or wsgi application. + :param environ: a WSGI environment object. + :return: a response object. + """ + if not isinstance(response, BaseResponse): + if environ is None: + raise TypeError('cannot convert WSGI application into ' + 'response objects without an environ') + response = BaseResponse(*_run_wsgi_app(response, environ)) + response.__class__ = cls + return response + + @classmethod + def from_app(cls, app, environ, buffered=False): + """Create a new response object from an application output. This + works best if you pass it an application that returns a generator all + the time. Sometimes applications may use the `write()` callable + returned by the `start_response` function. This tries to resolve such + edge cases automatically. But if you don't get the expected output + you should set `buffered` to `True` which enforces buffering. + + :param app: the WSGI application to execute. + :param environ: the WSGI environment to execute against. + :param buffered: set to `True` to enforce buffering. + :return: a response object. 
+ """ + return cls(*_run_wsgi_app(app, environ, buffered)) + + def _get_status_code(self): + return self._status_code + + def _set_status_code(self, code): + self._status_code = code + try: + self._status = '%d %s' % (code, HTTP_STATUS_CODES[code].upper()) + except KeyError: + self._status = '%d UNKNOWN' % code + status_code = property(_get_status_code, _set_status_code, + doc='The HTTP Status code as number') + del _get_status_code, _set_status_code + + def _get_status(self): + return self._status + + def _set_status(self, value): + self._status = to_native(value) + try: + self._status_code = int(self._status.split(None, 1)[0]) + except ValueError: + self._status_code = 0 + self._status = '0 %s' % self._status + status = property(_get_status, _set_status, doc='The HTTP Status code') + del _get_status, _set_status + + def get_data(self, as_text=False): + """The string representation of the request body. Whenever you call + this property the request iterable is encoded and flattened. This + can lead to unwanted behavior if you stream big data. + + This behavior can be disabled by setting + :attr:`implicit_sequence_conversion` to `False`. + + If `as_text` is set to `True` the return value will be a decoded + unicode string. + + .. versionadded:: 0.9 + """ + self._ensure_sequence() + rv = b''.join(self.iter_encoded()) + if as_text: + rv = rv.decode(self.charset) + return rv + + def set_data(self, value): + """Sets a new string as response. The value set must either by a + unicode or bytestring. If a unicode string is set it's encoded + automatically to the charset of the response (utf-8 by default). + + .. 
versionadded:: 0.9 + """ + # if an unicode string is set, it's encoded directly so that we + # can set the content length + if isinstance(value, text_type): + value = value.encode(self.charset) + else: + value = bytes(value) + self.response = [value] + if self.automatically_set_content_length: + self.headers['Content-Length'] = str(len(value)) + + data = property(get_data, set_data, doc=''' + A descriptor that calls :meth:`get_data` and :meth:`set_data`. This + should not be used and will eventually get deprecated. + ''') + + def calculate_content_length(self): + """Returns the content length if available or `None` otherwise.""" + try: + self._ensure_sequence() + except RuntimeError: + return None + return sum(len(x) for x in self.response) + + def _ensure_sequence(self, mutable=False): + """This method can be called by methods that need a sequence. If + `mutable` is true, it will also ensure that the response sequence + is a standard Python list. + + .. versionadded:: 0.6 + """ + if self.is_sequence: + # if we need a mutable object, we ensure it's a list. + if mutable and not isinstance(self.response, list): + self.response = list(self.response) + return + if self.direct_passthrough: + raise RuntimeError('Attempted implicit sequence conversion ' + 'but the response object is in direct ' + 'passthrough mode.') + if not self.implicit_sequence_conversion: + raise RuntimeError('The response object required the iterable ' + 'to be a sequence, but the implicit ' + 'conversion was disabled. Call ' + 'make_sequence() yourself.') + self.make_sequence() + + def make_sequence(self): + """Converts the response iterator in a list. By default this happens + automatically if required. If `implicit_sequence_conversion` is + disabled, this method is not automatically called and some properties + might raise exceptions. This also encodes all the items. + + .. 
versionadded:: 0.6 + """ + if not self.is_sequence: + # if we consume an iterable we have to ensure that the close + # method of the iterable is called if available when we tear + # down the response + close = getattr(self.response, 'close', None) + self.response = list(self.iter_encoded()) + if close is not None: + self.call_on_close(close) + + def iter_encoded(self): + """Iter the response encoded with the encoding of the response. + If the response object is invoked as WSGI application the return + value of this method is used as application iterator unless + :attr:`direct_passthrough` was activated. + """ + if __debug__: + _warn_if_string(self.response) + # Encode in a separate function so that self.response is fetched + # early. This allows us to wrap the response with the return + # value from get_app_iter or iter_encoded. + return _iter_encoded(self.response, self.charset) + + def set_cookie(self, key, value='', max_age=None, expires=None, + path='/', domain=None, secure=None, httponly=False): + """Sets a cookie. The parameters are the same as in the cookie `Morsel` + object in the Python standard library but it accepts unicode data, too. + + :param key: the key (name) of the cookie to be set. + :param value: the value of the cookie. + :param max_age: should be a number of seconds, or `None` (default) if + the cookie should last only as long as the client's + browser session. + :param expires: should be a `datetime` object or UNIX timestamp. + :param domain: if you want to set a cross-domain cookie. For example, + ``domain=".example.com"`` will set a cookie that is + readable by the domain ``www.example.com``, + ``foo.example.com`` etc. Otherwise, a cookie will only + be readable by the domain that set it. + :param path: limits the cookie to a given path, per default it will + span the whole domain. 
+ """ + self.headers.add('Set-Cookie', dump_cookie(key, value, max_age, + expires, path, domain, secure, httponly, + self.charset)) + + def delete_cookie(self, key, path='/', domain=None): + """Delete a cookie. Fails silently if key doesn't exist. + + :param key: the key (name) of the cookie to be deleted. + :param path: if the cookie that should be deleted was limited to a + path, the path has to be defined here. + :param domain: if the cookie that should be deleted was limited to a + domain, that domain has to be defined here. + """ + self.set_cookie(key, expires=0, max_age=0, path=path, domain=domain) + + @property + def is_streamed(self): + """If the response is streamed (the response is not an iterable with + a length information) this property is `True`. In this case streamed + means that there is no information about the number of iterations. + This is usually `True` if a generator is passed to the response object. + + This is useful for checking before applying some sort of post + filtering that should not take place for streamed responses. + """ + try: + len(self.response) + except (TypeError, AttributeError): + return True + return False + + @property + def is_sequence(self): + """If the iterator is buffered, this property will be `True`. A + response object will consider an iterator to be buffered if the + response attribute is a list or tuple. + + .. versionadded:: 0.6 + """ + return isinstance(self.response, (tuple, list)) + + def close(self): + """Close the wrapped response if possible. You can also use the object + in a with statement which will automatically close it. + + .. versionadded:: 0.9 + Can now be used in a with statement. + """ + if hasattr(self.response, 'close'): + self.response.close() + for func in self._on_close: + func() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, tb): + self.close() + + def freeze(self): + """Call this method if you want to make your response object ready for + being pickled. 
        This buffers the generator if there is one.  It will
        also set the `Content-Length` header to the length of the body.

        .. versionchanged:: 0.6
           The `Content-Length` header is now set.
        """
        # we explicitly set the length to a list of the *encoded* response
        # iterator.  Even if the implicit sequence conversion is disabled.
        self.response = list(self.iter_encoded())
        self.headers['Content-Length'] = str(sum(map(len, self.response)))

    def get_wsgi_headers(self, environ):
        """This is automatically called right before the response is started
        and returns headers modified for the given environment.  It returns a
        copy of the headers from the response with some modifications applied
        if necessary.

        For example the location header (if present) is joined with the root
        URL of the environment.  Also the content length is automatically set
        to zero here for certain status codes.

        .. versionchanged:: 0.6
           Previously that function was called `fix_headers` and modified
           the response object in place.  Also since 0.6, IRIs in location
           and content-location headers are handled properly.

           Also starting with 0.6, Werkzeug will attempt to set the content
           length if it is able to figure it out on its own.  This is the
           case if all the strings in the response iterable are already
           encoded and the iterable is buffered.

        :param environ: the WSGI environment of the request.
        :return: returns a new :class:`~werkzeug.datastructures.Headers`
                 object.
        """
        headers = Headers(self.headers)
        location = None
        content_location = None
        content_length = None
        status = self.status_code

        # iterate over the headers to find all values in one go.  Because
        # get_wsgi_headers is used each response that gives us a tiny
        # speedup.
        for key, value in headers:
            ikey = key.lower()
            if ikey == u'location':
                location = value
            elif ikey == u'content-location':
                content_location = value
            elif ikey == u'content-length':
                content_length = value

        # make sure the location header is an absolute URL
        if location is not None:
            old_location = location
            if isinstance(location, text_type):
                # Safe conversion is necessary here as we might redirect
                # to a broken URI scheme (for instance itms-services).
                location = iri_to_uri(location, safe_conversion=True)

            if self.autocorrect_location_header:
                current_url = get_current_url(environ, root_only=True)
                if isinstance(current_url, text_type):
                    current_url = iri_to_uri(current_url)
                location = url_join(current_url, location)
            # only write the header back if it actually changed
            if location != old_location:
                headers['Location'] = location

        # make sure the content location is a URL
        if content_location is not None and \
           isinstance(content_location, text_type):
            headers['Content-Location'] = iri_to_uri(content_location)

        # remove entity headers and set content length to zero if needed.
        # Also update content_length accordingly so that the automatic
        # content length detection does not trigger in the following
        # code.
        if 100 <= status < 200 or status == 204:
            headers['Content-Length'] = content_length = u'0'
        elif status == 304:
            remove_entity_headers(headers)

        # if we can determine the content length automatically, we
        # should try to do that.  But only if this does not involve
        # flattening the iterator or encoding of unicode strings in
        # the response.  We however should not do that if we have a 304
        # response.
        if self.automatically_set_content_length and \
           self.is_sequence and content_length is None and status != 304:
            try:
                content_length = sum(len(to_bytes(x, 'ascii'))
                                     for x in self.response)
            except UnicodeError:
                # aha, something non-bytestringy in there, too bad, we
                # can't safely figure out the length of the response.
                pass
            else:
                headers['Content-Length'] = str(content_length)

        return headers

    def get_app_iter(self, environ):
        """Returns the application iterator for the given environ.  Depending
        on the request method and the current status code the return value
        might be an empty response rather than the one from the response.

        If the request method is `HEAD` or the status code is in a range
        where the HTTP specification requires an empty response, an empty
        iterable is returned.

        .. versionadded:: 0.6

        :param environ: the WSGI environment of the request.
        :return: a response iterable.
        """
        status = self.status_code
        if environ['REQUEST_METHOD'] == 'HEAD' or \
           100 <= status < 200 or status in (204, 304):
            iterable = ()
        elif self.direct_passthrough:
            if __debug__:
                _warn_if_string(self.response)
            return self.response
        else:
            iterable = self.iter_encoded()
        # wrap so that close() (and its registered callbacks) runs when
        # the WSGI server closes the application iterator
        return ClosingIterator(iterable, self.close)

    def get_wsgi_response(self, environ):
        """Returns the final WSGI response as tuple.  The first item in
        the tuple is the application iterator, the second the status and
        the third the list of headers.  The response returned is created
        specially for the given environment.  For example if the request
        method in the WSGI environment is ``'HEAD'`` the response will
        be empty and only the headers and status code will be present.

        .. versionadded:: 0.6

        :param environ: the WSGI environment of the request.
        :return: an ``(app_iter, status, headers)`` tuple.
        """
        headers = self.get_wsgi_headers(environ)
        app_iter = self.get_app_iter(environ)
        return app_iter, self.status, headers.to_wsgi_list()

    def __call__(self, environ, start_response):
        """Process this response as WSGI application.

        :param environ: the WSGI environment.
        :param start_response: the response callable provided by the WSGI
                               server.
+ :return: an application iterator + """ + app_iter, status, headers = self.get_wsgi_response(environ) + start_response(status, headers) + return app_iter + + +class AcceptMixin(object): + + """A mixin for classes with an :attr:`~BaseResponse.environ` attribute + to get all the HTTP accept headers as + :class:`~werkzeug.datastructures.Accept` objects (or subclasses + thereof). + """ + + @cached_property + def accept_mimetypes(self): + """List of mimetypes this client supports as + :class:`~werkzeug.datastructures.MIMEAccept` object. + """ + return parse_accept_header(self.environ.get('HTTP_ACCEPT'), MIMEAccept) + + @cached_property + def accept_charsets(self): + """List of charsets this client supports as + :class:`~werkzeug.datastructures.CharsetAccept` object. + """ + return parse_accept_header(self.environ.get('HTTP_ACCEPT_CHARSET'), + CharsetAccept) + + @cached_property + def accept_encodings(self): + """List of encodings this client accepts. Encodings in a HTTP term + are compression encodings such as gzip. For charsets have a look at + :attr:`accept_charset`. + """ + return parse_accept_header(self.environ.get('HTTP_ACCEPT_ENCODING')) + + @cached_property + def accept_languages(self): + """List of languages this client accepts as + :class:`~werkzeug.datastructures.LanguageAccept` object. + + .. versionchanged 0.5 + In previous versions this was a regular + :class:`~werkzeug.datastructures.Accept` object. + """ + return parse_accept_header(self.environ.get('HTTP_ACCEPT_LANGUAGE'), + LanguageAccept) + + +class ETagRequestMixin(object): + + """Add entity tag and cache descriptors to a request object or object with + a WSGI environment available as :attr:`~BaseRequest.environ`. This not + only provides access to etags but also to the cache control header. + """ + + @cached_property + def cache_control(self): + """A :class:`~werkzeug.datastructures.RequestCacheControl` object + for the incoming cache control headers. 
+ """ + cache_control = self.environ.get('HTTP_CACHE_CONTROL') + return parse_cache_control_header(cache_control, None, + RequestCacheControl) + + @cached_property + def if_match(self): + """An object containing all the etags in the `If-Match` header. + + :rtype: :class:`~werkzeug.datastructures.ETags` + """ + return parse_etags(self.environ.get('HTTP_IF_MATCH')) + + @cached_property + def if_none_match(self): + """An object containing all the etags in the `If-None-Match` header. + + :rtype: :class:`~werkzeug.datastructures.ETags` + """ + return parse_etags(self.environ.get('HTTP_IF_NONE_MATCH')) + + @cached_property + def if_modified_since(self): + """The parsed `If-Modified-Since` header as datetime object.""" + return parse_date(self.environ.get('HTTP_IF_MODIFIED_SINCE')) + + @cached_property + def if_unmodified_since(self): + """The parsed `If-Unmodified-Since` header as datetime object.""" + return parse_date(self.environ.get('HTTP_IF_UNMODIFIED_SINCE')) + + @cached_property + def if_range(self): + """The parsed `If-Range` header. + + .. versionadded:: 0.7 + + :rtype: :class:`~werkzeug.datastructures.IfRange` + """ + return parse_if_range_header(self.environ.get('HTTP_IF_RANGE')) + + @cached_property + def range(self): + """The parsed `Range` header. + + .. versionadded:: 0.7 + + :rtype: :class:`~werkzeug.datastructures.Range` + """ + return parse_range_header(self.environ.get('HTTP_RANGE')) + + +class UserAgentMixin(object): + + """Adds a `user_agent` attribute to the request object which contains the + parsed user agent of the browser that triggered the request as a + :class:`~werkzeug.useragents.UserAgent` object. 
+ """ + + @cached_property + def user_agent(self): + """The current user agent.""" + from werkzeug.useragents import UserAgent + return UserAgent(self.environ) + + +class AuthorizationMixin(object): + + """Adds an :attr:`authorization` property that represents the parsed + value of the `Authorization` header as + :class:`~werkzeug.datastructures.Authorization` object. + """ + + @cached_property + def authorization(self): + """The `Authorization` object in parsed form.""" + header = self.environ.get('HTTP_AUTHORIZATION') + return parse_authorization_header(header) + + +class StreamOnlyMixin(object): + + """If mixed in before the request object this will change the bahavior + of it to disable handling of form parsing. This disables the + :attr:`files`, :attr:`form` attributes and will just provide a + :attr:`stream` attribute that however is always available. + + .. versionadded:: 0.9 + """ + + disable_data_descriptor = True + want_form_data_parsed = False + + +class ETagResponseMixin(object): + + """Adds extra functionality to a response object for etag and cache + handling. This mixin requires an object with at least a `headers` + object that implements a dict like interface similar to + :class:`~werkzeug.datastructures.Headers`. + + If you want the :meth:`freeze` method to automatically add an etag, you + have to mixin this method before the response base class. The default + response class does not do that. + """ + + @property + def cache_control(self): + """The Cache-Control general-header field is used to specify + directives that MUST be obeyed by all caching mechanisms along the + request/response chain. 
+ """ + def on_update(cache_control): + if not cache_control and 'cache-control' in self.headers: + del self.headers['cache-control'] + elif cache_control: + self.headers['Cache-Control'] = cache_control.to_header() + return parse_cache_control_header(self.headers.get('cache-control'), + on_update, + ResponseCacheControl) + + def make_conditional(self, request_or_environ): + """Make the response conditional to the request. This method works + best if an etag was defined for the response already. The `add_etag` + method can be used to do that. If called without etag just the date + header is set. + + This does nothing if the request method in the request or environ is + anything but GET or HEAD. + + It does not remove the body of the response because that's something + the :meth:`__call__` function does for us automatically. + + Returns self so that you can do ``return resp.make_conditional(req)`` + but modifies the object in-place. + + :param request_or_environ: a request object or WSGI environment to be + used to make the response conditional + against. + """ + environ = _get_environ(request_or_environ) + if environ['REQUEST_METHOD'] in ('GET', 'HEAD'): + # if the date is not in the headers, add it now. We however + # will not override an already existing header. Unfortunately + # this header will be overriden by many WSGI servers including + # wsgiref. 
+ if 'date' not in self.headers: + self.headers['Date'] = http_date() + if self.automatically_set_content_length and 'content-length' not in self.headers: + length = self.calculate_content_length() + if length is not None: + self.headers['Content-Length'] = length + if not is_resource_modified(environ, self.headers.get('etag'), None, + self.headers.get('last-modified')): + self.status_code = 304 + return self + + def add_etag(self, overwrite=False, weak=False): + """Add an etag for the current response if there is none yet.""" + if overwrite or 'etag' not in self.headers: + self.set_etag(generate_etag(self.get_data()), weak) + + def set_etag(self, etag, weak=False): + """Set the etag, and override the old one if there was one.""" + self.headers['ETag'] = quote_etag(etag, weak) + + def get_etag(self): + """Return a tuple in the form ``(etag, is_weak)``. If there is no + ETag the return value is ``(None, None)``. + """ + return unquote_etag(self.headers.get('ETag')) + + def freeze(self, no_etag=False): + """Call this method if you want to make your response object ready for + pickeling. This buffers the generator if there is one. This also + sets the etag unless `no_etag` is set to `True`. + """ + if not no_etag: + self.add_etag() + super(ETagResponseMixin, self).freeze() + + accept_ranges = header_property('Accept-Ranges', doc=''' + The `Accept-Ranges` header. Even though the name would indicate + that multiple values are supported, it must be one string token only. + + The values ``'bytes'`` and ``'none'`` are common. + + .. versionadded:: 0.7''') + + def _get_content_range(self): + def on_update(rng): + if not rng: + del self.headers['content-range'] + else: + self.headers['Content-Range'] = rng.to_header() + rv = parse_content_range_header(self.headers.get('content-range'), + on_update) + # always provide a content range object to make the descriptor + # more user friendly. It provides an unset() method that can be + # used to remove the header quickly. 
+ if rv is None: + rv = ContentRange(None, None, None, on_update=on_update) + return rv + + def _set_content_range(self, value): + if not value: + del self.headers['content-range'] + elif isinstance(value, string_types): + self.headers['Content-Range'] = value + else: + self.headers['Content-Range'] = value.to_header() + content_range = property(_get_content_range, _set_content_range, doc=''' + The `Content-Range` header as + :class:`~werkzeug.datastructures.ContentRange` object. Even if the + header is not set it wil provide such an object for easier + manipulation. + + .. versionadded:: 0.7''') + del _get_content_range, _set_content_range + + +class ResponseStream(object): + + """A file descriptor like object used by the :class:`ResponseStreamMixin` to + represent the body of the stream. It directly pushes into the response + iterable of the response object. + """ + + mode = 'wb+' + + def __init__(self, response): + self.response = response + self.closed = False + + def write(self, value): + if self.closed: + raise ValueError('I/O operation on closed file') + self.response._ensure_sequence(mutable=True) + self.response.response.append(value) + self.response.headers.pop('Content-Length', None) + + def writelines(self, seq): + for item in seq: + self.write(item) + + def close(self): + self.closed = True + + def flush(self): + if self.closed: + raise ValueError('I/O operation on closed file') + + def isatty(self): + if self.closed: + raise ValueError('I/O operation on closed file') + return False + + @property + def encoding(self): + return self.response.charset + + +class ResponseStreamMixin(object): + + """Mixin for :class:`BaseRequest` subclasses. Classes that inherit from + this mixin will automatically get a :attr:`stream` property that provides + a write-only interface to the response iterable. 
+ """ + + @cached_property + def stream(self): + """The response iterable as write-only stream.""" + return ResponseStream(self) + + +class CommonRequestDescriptorsMixin(object): + + """A mixin for :class:`BaseRequest` subclasses. Request objects that + mix this class in will automatically get descriptors for a couple of + HTTP headers with automatic type conversion. + + .. versionadded:: 0.5 + """ + + content_type = environ_property('CONTENT_TYPE', doc=''' + The Content-Type entity-header field indicates the media type of + the entity-body sent to the recipient or, in the case of the HEAD + method, the media type that would have been sent had the request + been a GET.''') + + @cached_property + def content_length(self): + """The Content-Length entity-header field indicates the size of the + entity-body in bytes or, in the case of the HEAD method, the size of + the entity-body that would have been sent had the request been a + GET. + """ + return get_content_length(self.environ) + + content_encoding = environ_property('HTTP_CONTENT_ENCODING', doc=''' + The Content-Encoding entity-header field is used as a modifier to the + media-type. When present, its value indicates what additional content + codings have been applied to the entity-body, and thus what decoding + mechanisms must be applied in order to obtain the media-type + referenced by the Content-Type header field. + + .. versionadded:: 0.9''') + content_md5 = environ_property('HTTP_CONTENT_MD5', doc=''' + The Content-MD5 entity-header field, as defined in RFC 1864, is an + MD5 digest of the entity-body for the purpose of providing an + end-to-end message integrity check (MIC) of the entity-body. (Note: + a MIC is good for detecting accidental modification of the + entity-body in transit, but is not proof against malicious attacks.) + + .. 
versionadded:: 0.9''') + referrer = environ_property('HTTP_REFERER', doc=''' + The Referer[sic] request-header field allows the client to specify, + for the server's benefit, the address (URI) of the resource from which + the Request-URI was obtained (the "referrer", although the header + field is misspelled).''') + date = environ_property('HTTP_DATE', None, parse_date, doc=''' + The Date general-header field represents the date and time at which + the message was originated, having the same semantics as orig-date + in RFC 822.''') + max_forwards = environ_property('HTTP_MAX_FORWARDS', None, int, doc=''' + The Max-Forwards request-header field provides a mechanism with the + TRACE and OPTIONS methods to limit the number of proxies or gateways + that can forward the request to the next inbound server.''') + + def _parse_content_type(self): + if not hasattr(self, '_parsed_content_type'): + self._parsed_content_type = \ + parse_options_header(self.environ.get('CONTENT_TYPE', '')) + + @property + def mimetype(self): + """Like :attr:`content_type`, but without parameters (eg, without + charset, type etc.) and always lowercase. For example if the content + type is ``text/HTML; charset=utf-8`` the mimetype would be + ``'text/html'``. + """ + self._parse_content_type() + return self._parsed_content_type[0].lower() + + @property + def mimetype_params(self): + """The mimetype parameters as dict. For example if the content + type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + """ + self._parse_content_type() + return self._parsed_content_type[1] + + @cached_property + def pragma(self): + """The Pragma general-header field is used to include + implementation-specific directives that might apply to any recipient + along the request/response chain. All pragma directives specify + optional behavior from the viewpoint of the protocol; however, some + systems MAY require that behavior be consistent with the directives. 
+ """ + return parse_set_header(self.environ.get('HTTP_PRAGMA', '')) + + +class CommonResponseDescriptorsMixin(object): + + """A mixin for :class:`BaseResponse` subclasses. Response objects that + mix this class in will automatically get descriptors for a couple of + HTTP headers with automatic type conversion. + """ + + def _get_mimetype(self): + ct = self.headers.get('content-type') + if ct: + return ct.split(';')[0].strip() + + def _set_mimetype(self, value): + self.headers['Content-Type'] = get_content_type(value, self.charset) + + def _get_mimetype_params(self): + def on_update(d): + self.headers['Content-Type'] = \ + dump_options_header(self.mimetype, d) + d = parse_options_header(self.headers.get('content-type', ''))[1] + return CallbackDict(d, on_update) + + mimetype = property(_get_mimetype, _set_mimetype, doc=''' + The mimetype (content type without charset etc.)''') + mimetype_params = property(_get_mimetype_params, doc=''' + The mimetype parameters as dict. For example if the content + type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. versionadded:: 0.5 + ''') + location = header_property('Location', doc=''' + The Location response-header field is used to redirect the recipient + to a location other than the Request-URI for completion of the request + or identification of a new resource.''') + age = header_property('Age', None, parse_date, http_date, doc=''' + The Age response-header field conveys the sender's estimate of the + amount of time since the response (or its revalidation) was + generated at the origin server. + + Age values are non-negative decimal integers, representing time in + seconds.''') + content_type = header_property('Content-Type', doc=''' + The Content-Type entity-header field indicates the media type of the + entity-body sent to the recipient or, in the case of the HEAD method, + the media type that would have been sent had the request been a GET. 
+ ''') + content_length = header_property('Content-Length', None, int, str, doc=''' + The Content-Length entity-header field indicates the size of the + entity-body, in decimal number of OCTETs, sent to the recipient or, + in the case of the HEAD method, the size of the entity-body that would + have been sent had the request been a GET.''') + content_location = header_property('Content-Location', doc=''' + The Content-Location entity-header field MAY be used to supply the + resource location for the entity enclosed in the message when that + entity is accessible from a location separate from the requested + resource's URI.''') + content_encoding = header_property('Content-Encoding', doc=''' + The Content-Encoding entity-header field is used as a modifier to the + media-type. When present, its value indicates what additional content + codings have been applied to the entity-body, and thus what decoding + mechanisms must be applied in order to obtain the media-type + referenced by the Content-Type header field.''') + content_md5 = header_property('Content-MD5', doc=''' + The Content-MD5 entity-header field, as defined in RFC 1864, is an + MD5 digest of the entity-body for the purpose of providing an + end-to-end message integrity check (MIC) of the entity-body. (Note: + a MIC is good for detecting accidental modification of the + entity-body in transit, but is not proof against malicious attacks.) + ''') + date = header_property('Date', None, parse_date, http_date, doc=''' + The Date general-header field represents the date and time at which + the message was originated, having the same semantics as orig-date + in RFC 822.''') + expires = header_property('Expires', None, parse_date, http_date, doc=''' + The Expires entity-header field gives the date/time after which the + response is considered stale. 
A stale cache entry may not normally be + returned by a cache.''') + last_modified = header_property('Last-Modified', None, parse_date, + http_date, doc=''' + The Last-Modified entity-header field indicates the date and time at + which the origin server believes the variant was last modified.''') + + def _get_retry_after(self): + value = self.headers.get('retry-after') + if value is None: + return + elif value.isdigit(): + return datetime.utcnow() + timedelta(seconds=int(value)) + return parse_date(value) + + def _set_retry_after(self, value): + if value is None: + if 'retry-after' in self.headers: + del self.headers['retry-after'] + return + elif isinstance(value, datetime): + value = http_date(value) + else: + value = str(value) + self.headers['Retry-After'] = value + + retry_after = property(_get_retry_after, _set_retry_after, doc=''' + The Retry-After response-header field can be used with a 503 (Service + Unavailable) response to indicate how long the service is expected + to be unavailable to the requesting client. 
+ + Time in seconds until expiration or date.''') + + def _set_property(name, doc=None): + def fget(self): + def on_update(header_set): + if not header_set and name in self.headers: + del self.headers[name] + elif header_set: + self.headers[name] = header_set.to_header() + return parse_set_header(self.headers.get(name), on_update) + + def fset(self, value): + if not value: + del self.headers[name] + elif isinstance(value, string_types): + self.headers[name] = value + else: + self.headers[name] = dump_header(value) + return property(fget, fset, doc=doc) + + vary = _set_property('Vary', doc=''' + The Vary field value indicates the set of request-header fields that + fully determines, while the response is fresh, whether a cache is + permitted to use the response to reply to a subsequent request + without revalidation.''') + content_language = _set_property('Content-Language', doc=''' + The Content-Language entity-header field describes the natural + language(s) of the intended audience for the enclosed entity. Note + that this might not be equivalent to all the languages used within + the entity-body.''') + allow = _set_property('Allow', doc=''' + The Allow entity-header field lists the set of methods supported + by the resource identified by the Request-URI. The purpose of this + field is strictly to inform the recipient of valid methods + associated with the resource. 
An Allow header field MUST be + present in a 405 (Method Not Allowed) response.''') + + del _set_property, _get_mimetype, _set_mimetype, _get_retry_after, \ + _set_retry_after + + +class WWWAuthenticateMixin(object): + + """Adds a :attr:`www_authenticate` property to a response object.""" + + @property + def www_authenticate(self): + """The `WWW-Authenticate` header in a parsed form.""" + def on_update(www_auth): + if not www_auth and 'www-authenticate' in self.headers: + del self.headers['www-authenticate'] + elif www_auth: + self.headers['WWW-Authenticate'] = www_auth.to_header() + header = self.headers.get('www-authenticate') + return parse_www_authenticate_header(header, on_update) + + +class Request(BaseRequest, AcceptMixin, ETagRequestMixin, + UserAgentMixin, AuthorizationMixin, + CommonRequestDescriptorsMixin): + + """Full featured request object implementing the following mixins: + + - :class:`AcceptMixin` for accept header parsing + - :class:`ETagRequestMixin` for etag and cache control handling + - :class:`UserAgentMixin` for user agent introspection + - :class:`AuthorizationMixin` for http auth handling + - :class:`CommonRequestDescriptorsMixin` for common headers + """ + + +class PlainRequest(StreamOnlyMixin, Request): + + """A request object without special form parsing capabilities. + + .. 
versionadded:: 0.9 + """ + + +class Response(BaseResponse, ETagResponseMixin, ResponseStreamMixin, + CommonResponseDescriptorsMixin, + WWWAuthenticateMixin): + + """Full featured response object implementing the following mixins: + + - :class:`ETagResponseMixin` for etag and cache control handling + - :class:`ResponseStreamMixin` to add support for the `stream` property + - :class:`CommonResponseDescriptorsMixin` for various HTTP descriptors + - :class:`WWWAuthenticateMixin` for HTTP authentication support + """ diff --git a/deps/werkzeug/wsgi.py b/deps/werkzeug/wsgi.py new file mode 100644 index 00000000..2e1c5845 --- /dev/null +++ b/deps/werkzeug/wsgi.py @@ -0,0 +1,1096 @@ +# -*- coding: utf-8 -*- +""" + werkzeug.wsgi + ~~~~~~~~~~~~~ + + This module implements WSGI related helpers. + + :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. + :license: BSD, see LICENSE for more details. +""" +import re +import os +import posixpath +import mimetypes +from itertools import chain +from zlib import adler32 +from time import time, mktime +from datetime import datetime +from functools import partial, update_wrapper + +from werkzeug._compat import iteritems, text_type, string_types, \ + implements_iterator, make_literal_wrapper, to_unicode, to_bytes, \ + wsgi_get_bytes, try_coerce_native, PY2 +from werkzeug._internal import _empty_stream, _encode_idna +from werkzeug.http import is_resource_modified, http_date +from werkzeug.urls import uri_to_iri, url_quote, url_parse, url_join +from werkzeug.filesystem import get_filesystem_encoding + + +def responder(f): + """Marks a function as responder. Decorate a function with it and it + will automatically call the return value as WSGI application. 
+ + Example:: + + @responder + def application(environ, start_response): + return Response('Hello World!') + """ + return update_wrapper(lambda *a: f(*a)(*a[-2:]), f) + + +def get_current_url(environ, root_only=False, strip_querystring=False, + host_only=False, trusted_hosts=None): + """A handy helper function that recreates the full URL as IRI for the + current request or parts of it. Here an example: + + >>> from werkzeug.test import create_environ + >>> env = create_environ("/?param=foo", "http://localhost/script") + >>> get_current_url(env) + 'http://localhost/script/?param=foo' + >>> get_current_url(env, root_only=True) + 'http://localhost/script/' + >>> get_current_url(env, host_only=True) + 'http://localhost/' + >>> get_current_url(env, strip_querystring=True) + 'http://localhost/script/' + + This optionally it verifies that the host is in a list of trusted hosts. + If the host is not in there it will raise a + :exc:`~werkzeug.exceptions.SecurityError`. + + Note that the string returned might contain unicode characters as the + representation is an IRI not an URI. If you need an ASCII only + representation you can use the :func:`~werkzeug.urls.iri_to_uri` + function: + + >>> from werkzeug.urls import iri_to_uri + >>> iri_to_uri(get_current_url(env)) + 'http://localhost/script/?param=foo' + + :param environ: the WSGI environment to get the current URL from. + :param root_only: set `True` if you only want the root URL. + :param strip_querystring: set to `True` if you don't want the querystring. + :param host_only: set to `True` if the host URL should be returned. + :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted` + for more information. 
+ """ + tmp = [environ['wsgi.url_scheme'], '://', get_host(environ, trusted_hosts)] + cat = tmp.append + if host_only: + return uri_to_iri(''.join(tmp) + '/') + cat(url_quote(wsgi_get_bytes(environ.get('SCRIPT_NAME', ''))).rstrip('/')) + cat('/') + if not root_only: + cat(url_quote(wsgi_get_bytes(environ.get('PATH_INFO', '')).lstrip(b'/'))) + if not strip_querystring: + qs = get_query_string(environ) + if qs: + cat('?' + qs) + return uri_to_iri(''.join(tmp)) + + +def host_is_trusted(hostname, trusted_list): + """Checks if a host is trusted against a list. This also takes care + of port normalization. + + .. versionadded:: 0.9 + + :param hostname: the hostname to check + :param trusted_list: a list of hostnames to check against. If a + hostname starts with a dot it will match against + all subdomains as well. + """ + if not hostname: + return False + + if isinstance(trusted_list, string_types): + trusted_list = [trusted_list] + + def _normalize(hostname): + if ':' in hostname: + hostname = hostname.rsplit(':', 1)[0] + return _encode_idna(hostname) + + try: + hostname = _normalize(hostname) + except UnicodeError: + return False + for ref in trusted_list: + if ref.startswith('.'): + ref = ref[1:] + suffix_match = True + else: + suffix_match = False + try: + ref = _normalize(ref) + except UnicodeError: + return False + if ref == hostname: + return True + if suffix_match and hostname.endswith('.' + ref): + return True + return False + + +def get_host(environ, trusted_hosts=None): + """Return the real host for the given WSGI environment. This first checks + the `X-Forwarded-Host` header, then the normal `Host` header, and finally + the `SERVER_NAME` environment variable (using the first one it finds). + + Optionally it verifies that the host is in a list of trusted hosts. + If the host is not in there it will raise a + :exc:`~werkzeug.exceptions.SecurityError`. + + :param environ: the WSGI environment to get the host of. 
+ :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted` + for more information. + """ + if 'HTTP_X_FORWARDED_HOST' in environ: + rv = environ['HTTP_X_FORWARDED_HOST'].split(',', 1)[0].strip() + elif 'HTTP_HOST' in environ: + rv = environ['HTTP_HOST'] + else: + rv = environ['SERVER_NAME'] + if (environ['wsgi.url_scheme'], environ['SERVER_PORT']) not \ + in (('https', '443'), ('http', '80')): + rv += ':' + environ['SERVER_PORT'] + if trusted_hosts is not None: + if not host_is_trusted(rv, trusted_hosts): + from werkzeug.exceptions import SecurityError + raise SecurityError('Host "%s" is not trusted' % rv) + return rv + + +def get_content_length(environ): + """Returns the content length from the WSGI environment as + integer. If it's not available `None` is returned. + + .. versionadded:: 0.9 + + :param environ: the WSGI environ to fetch the content length from. + """ + content_length = environ.get('CONTENT_LENGTH') + if content_length is not None: + try: + return max(0, int(content_length)) + except (ValueError, TypeError): + pass + + +def get_input_stream(environ, safe_fallback=True): + """Returns the input stream from the WSGI environment and wraps it + in the most sensible way possible. The stream returned is not the + raw WSGI stream in most cases but one that is safe to read from + without taking into account the content length. + + .. versionadded:: 0.9 + + :param environ: the WSGI environ to fetch the stream from. + :param safe: indicates whether the function should use an empty + stream as safe fallback or just return the original + WSGI input stream if it can't wrap it safely. The + default is to return an empty string in those cases. + """ + stream = environ['wsgi.input'] + content_length = get_content_length(environ) + + # A wsgi extension that tells us if the input is terminated. In + # that case we return the stream unchanged as we know we can safely + # read it until the end. 
+ if environ.get('wsgi.input_terminated'): + return stream + + # If we don't have a content length we fall back to an empty stream + # in case of a safe fallback, otherwise we return the stream unchanged. + # The non-safe fallback is not recommended but might be useful in + # some situations. + if content_length is None: + return safe_fallback and _empty_stream or stream + + # Otherwise limit the stream to the content length + return LimitedStream(stream, content_length) + + +def get_query_string(environ): + """Returns the `QUERY_STRING` from the WSGI environment. This also takes + care about the WSGI decoding dance on Python 3 environments as a + native string. The string returned will be restricted to ASCII + characters. + + .. versionadded:: 0.9 + + :param environ: the WSGI environment object to get the query string from. + """ + qs = wsgi_get_bytes(environ.get('QUERY_STRING', '')) + # QUERY_STRING really should be ascii safe but some browsers + # will send us some unicode stuff (I am looking at you IE). + # In that case we want to urllib quote it badly. + return try_coerce_native(url_quote(qs, safe=':&%=+$!*\'(),')) + + +def get_path_info(environ, charset='utf-8', errors='replace'): + """Returns the `PATH_INFO` from the WSGI environment and properly + decodes it. This also takes care about the WSGI decoding dance + on Python 3 environments. if the `charset` is set to `None` a + bytestring is returned. + + .. versionadded:: 0.9 + + :param environ: the WSGI environment object to get the path from. + :param charset: the charset for the path info, or `None` if no + decoding should be performed. + :param errors: the decoding error handling. + """ + path = wsgi_get_bytes(environ.get('PATH_INFO', '')) + return to_unicode(path, charset, errors, allow_none_charset=True) + + +def get_script_name(environ, charset='utf-8', errors='replace'): + """Returns the `SCRIPT_NAME` from the WSGI environment and properly + decodes it. 
This also takes care about the WSGI decoding dance + on Python 3 environments. if the `charset` is set to `None` a + bytestring is returned. + + .. versionadded:: 0.9 + + :param environ: the WSGI environment object to get the path from. + :param charset: the charset for the path, or `None` if no + decoding should be performed. + :param errors: the decoding error handling. + """ + path = wsgi_get_bytes(environ.get('SCRIPT_NAME', '')) + return to_unicode(path, charset, errors, allow_none_charset=True) + + +def pop_path_info(environ, charset='utf-8', errors='replace'): + """Removes and returns the next segment of `PATH_INFO`, pushing it onto + `SCRIPT_NAME`. Returns `None` if there is nothing left on `PATH_INFO`. + + If the `charset` is set to `None` a bytestring is returned. + + If there are empty segments (``'/foo//bar``) these are ignored but + properly pushed to the `SCRIPT_NAME`: + + >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} + >>> pop_path_info(env) + 'a' + >>> env['SCRIPT_NAME'] + '/foo/a' + >>> pop_path_info(env) + 'b' + >>> env['SCRIPT_NAME'] + '/foo/a/b' + + .. versionadded:: 0.5 + + .. versionchanged:: 0.9 + The path is now decoded and a charset and encoding + parameter can be provided. + + :param environ: the WSGI environment that is modified. 
+ """ + path = environ.get('PATH_INFO') + if not path: + return None + + script_name = environ.get('SCRIPT_NAME', '') + + # shift multiple leading slashes over + old_path = path + path = path.lstrip('/') + if path != old_path: + script_name += '/' * (len(old_path) - len(path)) + + if '/' not in path: + environ['PATH_INFO'] = '' + environ['SCRIPT_NAME'] = script_name + path + rv = wsgi_get_bytes(path) + else: + segment, path = path.split('/', 1) + environ['PATH_INFO'] = '/' + path + environ['SCRIPT_NAME'] = script_name + segment + rv = wsgi_get_bytes(segment) + + return to_unicode(rv, charset, errors, allow_none_charset=True) + + +def peek_path_info(environ, charset='utf-8', errors='replace'): + """Returns the next segment on the `PATH_INFO` or `None` if there + is none. Works like :func:`pop_path_info` without modifying the + environment: + + >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} + >>> peek_path_info(env) + 'a' + >>> peek_path_info(env) + 'a' + + If the `charset` is set to `None` a bytestring is returned. + + .. versionadded:: 0.5 + + .. versionchanged:: 0.9 + The path is now decoded and a charset and encoding + parameter can be provided. + + :param environ: the WSGI environment that is checked. + """ + segments = environ.get('PATH_INFO', '').lstrip('/').split('/', 1) + if segments: + return to_unicode(wsgi_get_bytes(segments[0]), + charset, errors, allow_none_charset=True) + + +def extract_path_info(environ_or_baseurl, path_or_url, charset='utf-8', + errors='replace', collapse_http_schemes=True): + """Extracts the path info from the given URL (or WSGI environment) and + path. The path info returned is a unicode string, not a bytestring + suitable for a WSGI environment. The URLs might also be IRIs. + + If the path info could not be determined, `None` is returned. + + Some examples: + + >>> extract_path_info('http://example.com/app', '/app/hello') + u'/hello' + >>> extract_path_info('http://example.com/app', + ... 
'https://example.com/app/hello') + u'/hello' + >>> extract_path_info('http://example.com/app', + ... 'https://example.com/app/hello', + ... collapse_http_schemes=False) is None + True + + Instead of providing a base URL you can also pass a WSGI environment. + + .. versionadded:: 0.6 + + :param environ_or_baseurl: a WSGI environment dict, a base URL or + base IRI. This is the root of the + application. + :param path_or_url: an absolute path from the server root, a + relative path (in which case it's the path info) + or a full URL. Also accepts IRIs and unicode + parameters. + :param charset: the charset for byte data in URLs + :param errors: the error handling on decode + :param collapse_http_schemes: if set to `False` the algorithm does + not assume that http and https on the + same server point to the same + resource. + """ + def _normalize_netloc(scheme, netloc): + parts = netloc.split(u'@', 1)[-1].split(u':', 1) + if len(parts) == 2: + netloc, port = parts + if (scheme == u'http' and port == u'80') or \ + (scheme == u'https' and port == u'443'): + port = None + else: + netloc = parts[0] + port = None + if port is not None: + netloc += u':' + port + return netloc + + # make sure whatever we are working on is a IRI and parse it + path = uri_to_iri(path_or_url, charset, errors) + if isinstance(environ_or_baseurl, dict): + environ_or_baseurl = get_current_url(environ_or_baseurl, + root_only=True) + base_iri = uri_to_iri(environ_or_baseurl, charset, errors) + base_scheme, base_netloc, base_path = url_parse(base_iri)[:3] + cur_scheme, cur_netloc, cur_path, = \ + url_parse(url_join(base_iri, path))[:3] + + # normalize the network location + base_netloc = _normalize_netloc(base_scheme, base_netloc) + cur_netloc = _normalize_netloc(cur_scheme, cur_netloc) + + # is that IRI even on a known HTTP scheme? 
+ if collapse_http_schemes: + for scheme in base_scheme, cur_scheme: + if scheme not in (u'http', u'https'): + return None + else: + if not (base_scheme in (u'http', u'https') and + base_scheme == cur_scheme): + return None + + # are the netlocs compatible? + if base_netloc != cur_netloc: + return None + + # are we below the application path? + base_path = base_path.rstrip(u'/') + if not cur_path.startswith(base_path): + return None + + return u'/' + cur_path[len(base_path):].lstrip(u'/') + + +class SharedDataMiddleware(object): + + """A WSGI middleware that provides static content for development + environments or simple server setups. Usage is quite simple:: + + import os + from werkzeug.wsgi import SharedDataMiddleware + + app = SharedDataMiddleware(app, { + '/shared': os.path.join(os.path.dirname(__file__), 'shared') + }) + + The contents of the folder ``./shared`` will now be available on + ``http://example.com/shared/``. This is pretty useful during development + because a standalone media server is not required. One can also mount + files on the root folder and still continue to use the application because + the shared data middleware forwards all unhandled requests to the + application, even if the requests are below one of the shared folders. + + If `pkg_resources` is available you can also tell the middleware to serve + files from package data:: + + app = SharedDataMiddleware(app, { + '/shared': ('myapplication', 'shared_files') + }) + + This will then serve the ``shared_files`` folder in the `myapplication` + Python package. + + The optional `disallow` parameter can be a list of :func:`~fnmatch.fnmatch` + rules for files that are not accessible from the web. If `cache` is set to + `False` no caching headers are sent. + + Currently the middleware does not support non ASCII filenames. If the + encoding on the file system happens to be the encoding of the URI it may + work but this could also be by accident. 
We strongly suggest using ASCII + only file names for static files. + + The middleware will guess the mimetype using the Python `mimetype` + module. If it's unable to figure out the charset it will fall back + to `fallback_mimetype`. + + .. versionchanged:: 0.5 + The cache timeout is configurable now. + + .. versionadded:: 0.6 + The `fallback_mimetype` parameter was added. + + :param app: the application to wrap. If you don't want to wrap an + application you can pass it :exc:`NotFound`. + :param exports: a dict of exported files and folders. + :param disallow: a list of :func:`~fnmatch.fnmatch` rules. + :param fallback_mimetype: the fallback mimetype for unknown files. + :param cache: enable or disable caching headers. + :param cache_timeout: the cache timeout in seconds for the headers. + """ + + def __init__(self, app, exports, disallow=None, cache=True, + cache_timeout=60 * 60 * 12, fallback_mimetype='text/plain'): + self.app = app + self.exports = {} + self.cache = cache + self.cache_timeout = cache_timeout + for key, value in iteritems(exports): + if isinstance(value, tuple): + loader = self.get_package_loader(*value) + elif isinstance(value, string_types): + if os.path.isfile(value): + loader = self.get_file_loader(value) + else: + loader = self.get_directory_loader(value) + else: + raise TypeError('unknown def %r' % value) + self.exports[key] = loader + if disallow is not None: + from fnmatch import fnmatch + self.is_allowed = lambda x: not fnmatch(x, disallow) + self.fallback_mimetype = fallback_mimetype + + def is_allowed(self, filename): + """Subclasses can override this method to disallow the access to + certain files. However by providing `disallow` in the constructor + this method is overwritten. 
+ """ + return True + + def _opener(self, filename): + return lambda: ( + open(filename, 'rb'), + datetime.utcfromtimestamp(os.path.getmtime(filename)), + int(os.path.getsize(filename)) + ) + + def get_file_loader(self, filename): + return lambda x: (os.path.basename(filename), self._opener(filename)) + + def get_package_loader(self, package, package_path): + from pkg_resources import DefaultProvider, ResourceManager, \ + get_provider + loadtime = datetime.utcnow() + provider = get_provider(package) + manager = ResourceManager() + filesystem_bound = isinstance(provider, DefaultProvider) + + def loader(path): + if path is None: + return None, None + path = posixpath.join(package_path, path) + if not provider.has_resource(path): + return None, None + basename = posixpath.basename(path) + if filesystem_bound: + return basename, self._opener( + provider.get_resource_filename(manager, path)) + return basename, lambda: ( + provider.get_resource_stream(manager, path), + loadtime, + 0 + ) + return loader + + def get_directory_loader(self, directory): + def loader(path): + if path is not None: + path = os.path.join(directory, path) + else: + path = directory + if os.path.isfile(path): + return os.path.basename(path), self._opener(path) + return None, None + return loader + + def generate_etag(self, mtime, file_size, real_filename): + if not isinstance(real_filename, bytes): + real_filename = real_filename.encode(get_filesystem_encoding()) + return 'wzsdm-%d-%s-%s' % ( + mktime(mtime.timetuple()), + file_size, + adler32(real_filename) & 0xffffffff + ) + + def __call__(self, environ, start_response): + cleaned_path = get_path_info(environ) + if PY2: + cleaned_path = cleaned_path.encode(get_filesystem_encoding()) + # sanitize the path for non unix systems + cleaned_path = cleaned_path.strip('/') + for sep in os.sep, os.altsep: + if sep and sep != '/': + cleaned_path = cleaned_path.replace(sep, '/') + path = '/' + '/'.join(x for x in cleaned_path.split('/') + if x and x != 
'..') + file_loader = None + for search_path, loader in iteritems(self.exports): + if search_path == path: + real_filename, file_loader = loader(None) + if file_loader is not None: + break + if not search_path.endswith('/'): + search_path += '/' + if path.startswith(search_path): + real_filename, file_loader = loader(path[len(search_path):]) + if file_loader is not None: + break + if file_loader is None or not self.is_allowed(real_filename): + return self.app(environ, start_response) + + guessed_type = mimetypes.guess_type(real_filename) + mime_type = guessed_type[0] or self.fallback_mimetype + f, mtime, file_size = file_loader() + + headers = [('Date', http_date())] + if self.cache: + timeout = self.cache_timeout + etag = self.generate_etag(mtime, file_size, real_filename) + headers += [ + ('Etag', '"%s"' % etag), + ('Cache-Control', 'max-age=%d, public' % timeout) + ] + if not is_resource_modified(environ, etag, last_modified=mtime): + f.close() + start_response('304 Not Modified', headers) + return [] + headers.append(('Expires', http_date(time() + timeout))) + else: + headers.append(('Cache-Control', 'public')) + + headers.extend(( + ('Content-Type', mime_type), + ('Content-Length', str(file_size)), + ('Last-Modified', http_date(mtime)) + )) + start_response('200 OK', headers) + return wrap_file(environ, f) + + +class DispatcherMiddleware(object): + + """Allows one to mount middlewares or applications in a WSGI application. 
+ This is useful if you want to combine multiple WSGI applications:: + + app = DispatcherMiddleware(app, { + '/app2': app2, + '/app3': app3 + }) + """ + + def __init__(self, app, mounts=None): + self.app = app + self.mounts = mounts or {} + + def __call__(self, environ, start_response): + script = environ.get('PATH_INFO', '') + path_info = '' + while '/' in script: + if script in self.mounts: + app = self.mounts[script] + break + script, last_item = script.rsplit('/', 1) + path_info = '/%s%s' % (last_item, path_info) + else: + app = self.mounts.get(script, self.app) + original_script_name = environ.get('SCRIPT_NAME', '') + environ['SCRIPT_NAME'] = original_script_name + script + environ['PATH_INFO'] = path_info + return app(environ, start_response) + + +@implements_iterator +class ClosingIterator(object): + + """The WSGI specification requires that all middlewares and gateways + respect the `close` callback of an iterator. Because it is useful to add + another close action to a returned iterator and adding a custom iterator + is a boring task this class can be used for that:: + + return ClosingIterator(app(environ, start_response), [cleanup_session, + cleanup_locals]) + + If there is just one close function it can be passed instead of the list. 
+ + A closing iterator is not needed if the application uses response objects + and finishes the processing if the response is started:: + + try: + return response(environ, start_response) + finally: + cleanup_session() + cleanup_locals() + """ + + def __init__(self, iterable, callbacks=None): + iterator = iter(iterable) + self._next = partial(next, iterator) + if callbacks is None: + callbacks = [] + elif callable(callbacks): + callbacks = [callbacks] + else: + callbacks = list(callbacks) + iterable_close = getattr(iterator, 'close', None) + if iterable_close: + callbacks.insert(0, iterable_close) + self._callbacks = callbacks + + def __iter__(self): + return self + + def __next__(self): + return self._next() + + def close(self): + for callback in self._callbacks: + callback() + + +def wrap_file(environ, file, buffer_size=8192): + """Wraps a file. This uses the WSGI server's file wrapper if available + or otherwise the generic :class:`FileWrapper`. + + .. versionadded:: 0.5 + + If the file wrapper from the WSGI server is used it's important to not + iterate over it from inside the application but to pass it through + unchanged. If you want to pass out a file wrapper inside a response + object you have to set :attr:`~BaseResponse.direct_passthrough` to `True`. + + More information about file wrappers are available in :pep:`333`. + + :param file: a :class:`file`-like object with a :meth:`~file.read` method. + :param buffer_size: number of bytes for one iteration. + """ + return environ.get('wsgi.file_wrapper', FileWrapper)(file, buffer_size) + + +@implements_iterator +class FileWrapper(object): + + """This class can be used to convert a :class:`file`-like object into + an iterable. It yields `buffer_size` blocks until the file is fully + read. + + You should not use this class directly but rather use the + :func:`wrap_file` function that uses the WSGI server's file wrapper + support if it's available. + + .. 
versionadded:: 0.5 + + If you're using this object together with a :class:`BaseResponse` you have + to use the `direct_passthrough` mode. + + :param file: a :class:`file`-like object with a :meth:`~file.read` method. + :param buffer_size: number of bytes for one iteration. + """ + + def __init__(self, file, buffer_size=8192): + self.file = file + self.buffer_size = buffer_size + + def close(self): + if hasattr(self.file, 'close'): + self.file.close() + + def __iter__(self): + return self + + def __next__(self): + data = self.file.read(self.buffer_size) + if data: + return data + raise StopIteration() + + +def _make_chunk_iter(stream, limit, buffer_size): + """Helper for the line and chunk iter functions.""" + if isinstance(stream, (bytes, bytearray, text_type)): + raise TypeError('Passed a string or byte object instead of ' + 'true iterator or stream.') + if not hasattr(stream, 'read'): + for item in stream: + if item: + yield item + return + if not isinstance(stream, LimitedStream) and limit is not None: + stream = LimitedStream(stream, limit) + _read = stream.read + while 1: + item = _read(buffer_size) + if not item: + break + yield item + + +def make_line_iter(stream, limit=None, buffer_size=10 * 1024, + cap_at_buffer=False): + """Safely iterates line-based over an input stream. If the input stream + is not a :class:`LimitedStream` the `limit` parameter is mandatory. + + This uses the stream's :meth:`~file.read` method internally as opposite + to the :meth:`~file.readline` method that is unsafe and can only be used + in violation of the WSGI specification. The same problem applies to the + `__iter__` function of the input stream which calls :meth:`~file.readline` + without arguments. + + If you need line-by-line processing it's strongly recommended to iterate + over the input stream using this helper function. + + .. versionchanged:: 0.8 + This function now ensures that the limit was reached. + + .. 
versionadded:: 0.9 + added support for iterators as input stream. + + :param stream: the stream or iterate to iterate over. + :param limit: the limit in bytes for the stream. (Usually + content length. Not necessary if the `stream` + is a :class:`LimitedStream`. + :param buffer_size: The optional buffer size. + :param cap_at_buffer: if this is set chunks are split if they are longer + than the buffer size. Internally this is implemented + that the buffer size might be exhausted by a factor + of two however. + .. versionadded:: 0.11.10 + added support for the `cap_at_buffer` parameter. + """ + _iter = _make_chunk_iter(stream, limit, buffer_size) + + first_item = next(_iter, '') + if not first_item: + return + + s = make_literal_wrapper(first_item) + empty = s('') + cr = s('\r') + lf = s('\n') + crlf = s('\r\n') + + _iter = chain((first_item,), _iter) + + def _iter_basic_lines(): + _join = empty.join + buffer = [] + while 1: + new_data = next(_iter, '') + if not new_data: + break + new_buf = [] + buf_size = 0 + for item in chain(buffer, new_data.splitlines(True)): + new_buf.append(item) + buf_size += len(item) + if item and item[-1:] in crlf: + yield _join(new_buf) + new_buf = [] + elif cap_at_buffer and buf_size >= buffer_size: + rv = _join(new_buf) + while len(rv) >= buffer_size: + yield rv[:buffer_size] + rv = rv[buffer_size:] + new_buf = [rv] + buffer = new_buf + if buffer: + yield _join(buffer) + + # This hackery is necessary to merge 'foo\r' and '\n' into one item + # of 'foo\r\n' if we were unlucky and we hit a chunk boundary. + previous = empty + for item in _iter_basic_lines(): + if item == lf and previous[-1:] == cr: + previous += item + item = empty + if previous: + yield previous + previous = item + if previous: + yield previous + + +def make_chunk_iter(stream, separator, limit=None, buffer_size=10 * 1024, + cap_at_buffer=False): + """Works like :func:`make_line_iter` but accepts a separator + which divides chunks. 
If you want newline based processing + you should use :func:`make_line_iter` instead as it + supports arbitrary newline markers. + + .. versionadded:: 0.8 + + .. versionadded:: 0.9 + added support for iterators as input stream. + + .. versionadded:: 0.11.10 + added support for the `cap_at_buffer` parameter. + + :param stream: the stream or iterate to iterate over. + :param separator: the separator that divides chunks. + :param limit: the limit in bytes for the stream. (Usually + content length. Not necessary if the `stream` + is otherwise already limited). + :param buffer_size: The optional buffer size. + :param cap_at_buffer: if this is set chunks are split if they are longer + than the buffer size. Internally this is implemented + that the buffer size might be exhausted by a factor + of two however. + """ + _iter = _make_chunk_iter(stream, limit, buffer_size) + + first_item = next(_iter, '') + if not first_item: + return + + _iter = chain((first_item,), _iter) + if isinstance(first_item, text_type): + separator = to_unicode(separator) + _split = re.compile(r'(%s)' % re.escape(separator)).split + _join = u''.join + else: + separator = to_bytes(separator) + _split = re.compile(b'(' + re.escape(separator) + b')').split + _join = b''.join + + buffer = [] + while 1: + new_data = next(_iter, '') + if not new_data: + break + chunks = _split(new_data) + new_buf = [] + buf_size = 0 + for item in chain(buffer, chunks): + if item == separator: + yield _join(new_buf) + new_buf = [] + buf_size = 0 + else: + buf_size += len(item) + new_buf.append(item) + + if cap_at_buffer and buf_size >= buffer_size: + rv = _join(new_buf) + while len(rv) >= buffer_size: + yield rv[:buffer_size] + rv = rv[buffer_size:] + new_buf = [rv] + buf_size = len(rv) + + buffer = new_buf + if buffer: + yield _join(buffer) + + +@implements_iterator +class LimitedStream(object): + + """Wraps a stream so that it doesn't read more than n bytes. 
If the + stream is exhausted and the caller tries to get more bytes from it + :func:`on_exhausted` is called which by default returns an empty + string. The return value of that function is forwarded + to the reader function. So if it returns an empty string + :meth:`read` will return an empty string as well. + + The limit however must never be higher than what the stream can + output. Otherwise :meth:`readlines` will try to read past the + limit. + + .. admonition:: Note on WSGI compliance + + calls to :meth:`readline` and :meth:`readlines` are not + WSGI compliant because it passes a size argument to the + readline methods. Unfortunately the WSGI PEP is not safely + implementable without a size argument to :meth:`readline` + because there is no EOF marker in the stream. As a result + of that the use of :meth:`readline` is discouraged. + + For the same reason iterating over the :class:`LimitedStream` + is not portable. It internally calls :meth:`readline`. + + We strongly suggest using :meth:`read` only or using the + :func:`make_line_iter` which safely iterates line-based + over a WSGI input stream. + + :param stream: the stream to wrap. + :param limit: the limit for the stream, must not be longer than + what the string can provide if the stream does not + end with `EOF` (like `wsgi.input`) + """ + + def __init__(self, stream, limit): + self._read = stream.read + self._readline = stream.readline + self._pos = 0 + self.limit = limit + + def __iter__(self): + return self + + @property + def is_exhausted(self): + """If the stream is exhausted this attribute is `True`.""" + return self._pos >= self.limit + + def on_exhausted(self): + """This is called when the stream tries to read past the limit. + The return value of this function is returned from the reading + function. + """ + # Read null bytes from the stream so that we get the + # correct end of stream marker. 
+ return self._read(0) + + def on_disconnect(self): + """What should happen if a disconnect is detected? The return + value of this function is returned from read functions in case + the client went away. By default a + :exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised. + """ + from werkzeug.exceptions import ClientDisconnected + raise ClientDisconnected() + + def exhaust(self, chunk_size=1024 * 64): + """Exhaust the stream. This consumes all the data left until the + limit is reached. + + :param chunk_size: the size for a chunk. It will read the chunk + until the stream is exhausted and throw away + the results. + """ + to_read = self.limit - self._pos + chunk = chunk_size + while to_read > 0: + chunk = min(to_read, chunk) + self.read(chunk) + to_read -= chunk + + def read(self, size=None): + """Read `size` bytes or if size is not provided everything is read. + + :param size: the number of bytes read. + """ + if self._pos >= self.limit: + return self.on_exhausted() + if size is None or size == -1: # -1 is for consistence with file + size = self.limit + to_read = min(self.limit - self._pos, size) + try: + read = self._read(to_read) + except (IOError, ValueError): + return self.on_disconnect() + if to_read and len(read) != to_read: + return self.on_disconnect() + self._pos += len(read) + return read + + def readline(self, size=None): + """Reads one line from the stream.""" + if self._pos >= self.limit: + return self.on_exhausted() + if size is None: + size = self.limit - self._pos + else: + size = min(size, self.limit - self._pos) + try: + line = self._readline(size) + except (ValueError, IOError): + return self.on_disconnect() + if size and not line: + return self.on_disconnect() + self._pos += len(line) + return line + + def readlines(self, size=None): + """Reads a file into a list of strings. It calls :meth:`readline` + until the file is read to the end. 
It does support the optional + `size` argument if the underlaying stream supports it for + `readline`. + """ + last_pos = self._pos + result = [] + if size is not None: + end = min(self.limit, last_pos + size) + else: + end = self.limit + while 1: + if size is not None: + size -= last_pos - self._pos + if self._pos >= end: + break + result.append(self.readline(size)) + if size is not None: + last_pos = self._pos + return result + + def tell(self): + """Returns the position of the stream. + + .. versionadded:: 0.9 + """ + return self._pos + + def __next__(self): + line = self.readline() + if not line: + raise StopIteration() + return line diff --git a/deps/zeroconf-0.17.6.dist-info/DESCRIPTION.rst b/deps/zeroconf-0.17.6.dist-info/DESCRIPTION.rst new file mode 100644 index 00000000..d52514cf --- /dev/null +++ b/deps/zeroconf-0.17.6.dist-info/DESCRIPTION.rst @@ -0,0 +1,309 @@ +python-zeroconf +=============== + +.. image:: https://travis-ci.org/jstasiak/python-zeroconf.svg?branch=master + :target: https://travis-ci.org/jstasiak/python-zeroconf + +.. image:: https://img.shields.io/pypi/v/zeroconf.svg + :target: https://pypi.python.org/pypi/zeroconf + +.. image:: https://img.shields.io/coveralls/jstasiak/python-zeroconf.svg + :target: https://coveralls.io/r/jstasiak/python-zeroconf + + +This is fork of pyzeroconf, Multicast DNS Service Discovery for Python, +originally by Paul Scott-Murphy (https://github.com/paulsm/pyzeroconf), +modified by William McBrine (https://github.com/wmcbrine/pyzeroconf). + +The original William McBrine's fork note:: + + This fork is used in all of my TiVo-related projects: HME for Python + (and therefore HME/VLC), Network Remote, Remote Proxy, and pyTivo. + Before this, I was tracking the changes for zeroconf.py in three + separate repos. I figured I should have an authoritative source. + + Although I make changes based on my experience with TiVos, I expect that + they're generally applicable. 
This version also includes patches found + on the now-defunct (?) Launchpad repo of pyzeroconf, and elsewhere + around the net -- not always well-documented, sorry. + +Compatible with: + +* Bonjour +* Avahi + +Compared to some other Zeroconf/Bonjour/Avahi Python packages, python-zeroconf: + +* isn't tied to Bonjour or Avahi +* doesn't use D-Bus +* doesn't force you to use particular event loop or Twisted +* is pip-installable +* has PyPI distribution + +Python compatibility +-------------------- + +* CPython 2.6, 2.7, 3.3+ +* PyPy 2.2+ (possibly 1.9-2.1 as well) +* PyPy3 2.4+ + +Versioning +---------- + +This project's versions follow the following pattern: MAJOR.MINOR.PATCH. + +* MAJOR version has been 0 so far +* MINOR version is incremented on backward incompatible changes +* PATCH version is incremented on backward compatible changes + +Status +------ + +There are some people using this package. I don't actively use it and as such +any help I can offer with regard to any issues is very limited. + + +How to get python-zeroconf? +=========================== + +* PyPI page https://pypi.python.org/pypi/zeroconf +* GitHub project https://github.com/jstasiak/python-zeroconf + +The easiest way to install python-zeroconf is using pip:: + + pip install zeroconf + + + +How do I use it? +================ + +Here's an example: + +.. code-block:: python + + from six.moves import input + from zeroconf import ServiceBrowser, Zeroconf + + + class MyListener(object): + + def remove_service(self, zeroconf, type, name): + print("Service %s removed" % (name,)) + + def add_service(self, zeroconf, type, name): + info = zeroconf.get_service_info(type, name) + print("Service %s added, service info: %s" % (name, info)) + + + zeroconf = Zeroconf() + listener = MyListener() + browser = ServiceBrowser(zeroconf, "_http._tcp.local.", listener) + try: + input("Press enter to exit...\n\n") + finally: + zeroconf.close() + +.. 
note:: + + Discovery and service registration use *all* available network interfaces by default. + If you want to customize that you need to specify ``interfaces`` argument when + constructing ``Zeroconf`` object (see the code for details). + +If you don't know the name of the service you need to browse for, try: + +.. code-block:: python + + from zeroconf import ZeroconfServiceTypes + print('\n'.join(ZeroconfServiceTypes.find())) + +See examples directory for more. + +Changelog +========= + +0.17.6 +------ + +* Many improvements to address race conditions and exceptions during ZC() + startup and shutdown, thanks to: morpav, veawor, justingiorgi, herczy, + stephenrauch +* Added more test coverage: strahlex, stephenrauch +* Stephen Rauch contributed: + + - Speed up browser startup + - Add ZeroconfServiceTypes() query class to discover all advertised service types + - Add full validation for service names, types and subtypes + - Fix for subtype browsing + - Fix DNSHInfo support + +0.17.5 +------ + +* Fixed OpenBSD compatibility, thanks to Alessio Sergi +* Fixed race condition on ServiceBrowser startup, thanks to gbiddison +* Fixed installation on some Python 3 systems, thanks to Per Sandström +* Fixed "size change during iteration" bug on Python 3, thanks to gbiddison + +0.17.4 +------ + +* Fixed support for Linux kernel versions < 3.9 (thanks to Giovanni Harting + and Luckydonald, GitHub pull request #26) + +0.17.3 +------ + +* Fixed DNSText repr on Python 3 (it'd crash when the text was longer than + 10 bytes), thanks to Paulus Schoutsen for the patch, GitHub pull request #24 + +0.17.2 +------ + +* Fixed installation on Python 3.4.3+ (was failing because of enum34 dependency + which fails to install on 3.4.3+, changed to depend on enum-compat instead; + thanks to Michael Brennan for the original patch, GitHub pull request #22) + +0.17.1 +------ + +* Fixed EADDRNOTAVAIL when attempting to use dummy network interfaces on Windows, + thanks to daid + +0.17.0 +------ + 
+* Added some Python dependencies so it's not zero-dependencies anymore +* Improved exception handling (it'll be quieter now) +* Messages are listened to and sent using all available network interfaces + by default (configurable); thanks to Marcus Müller +* Started using logging more freely +* Fixed a bug with binary strings as property values being converted to False + (https://github.com/jstasiak/python-zeroconf/pull/10); thanks to Dr. Seuss +* Added new ``ServiceBrowser`` event handler interface (see the examples) +* PyPy3 now officially supported +* Fixed ServiceInfo repr on Python 3, thanks to Yordan Miladinov + +0.16.0 +------ + +* Set up Python logging and started using it +* Cleaned up code style (includes migrating from camel case to snake case) + +0.15.1 +------ + +* Fixed handling closed socket (GitHub #4) + +0.15 +---- + +* Forked by Jakub Stasiak +* Made Python 3 compatible +* Added setup script, made installable by pip and uploaded to PyPI +* Set up Travis build +* Reformatted the code and moved files around +* Stopped catching BaseException in several places, that could hide errors +* Marked threads as daemonic, they won't keep application alive now + +0.14 +---- + +* Fix for SOL_IP undefined on some systems - thanks Mike Erdely. +* Cleaned up examples. +* Lowercased module name. + +0.13 +---- + +* Various minor changes; see git for details. +* No longer compatible with Python 2.2. Only tested with 2.5-2.7. +* Fork by William McBrine. + +0.12 +---- + +* allow selection of binding interface +* typo fix - Thanks A. M. Kuchlingi +* removed all use of word 'Rendezvous' - this is an API change + +0.11 +---- + +* correction to comments for addListener method +* support for new record types seen from OS X + - IPv6 address + - hostinfo + +* ignore unknown DNS record types +* fixes to name decoding +* works alongside other processes using port 5353 (e.g. 
on Mac OS X) +* tested against Mac OS X 10.3.2's mDNSResponder +* corrections to removal of list entries for service browser + +0.10 +---- + +* Jonathon Paisley contributed these corrections: + + - always multicast replies, even when query is unicast + - correct a pointer encoding problem + - can now write records in any order + - traceback shown on failure + - better TXT record parsing + - server is now separate from name + - can cancel a service browser + +* modified some unit tests to accommodate these changes + +0.09 +---- + +* remove all records on service unregistration +* fix DOS security problem with readName + +0.08 +---- + +* changed licensing to LGPL + +0.07 +---- + +* faster shutdown on engine +* pointer encoding of outgoing names +* ServiceBrowser now works +* new unit tests + +0.06 +---- +* small improvements with unit tests +* added defined exception types +* new style objects +* fixed hostname/interface problem +* fixed socket timeout problem +* fixed add_service_listener() typo bug +* using select() for socket reads +* tested on Debian unstable with Python 2.2.2 + +0.05 +---- + +* ensure case insensitivty on domain names +* support for unicast DNS queries + +0.04 +---- + +* added some unit tests +* added __ne__ adjuncts where required +* ensure names end in '.local.' +* timeout on receiving socket for clean shutdown + + +License +======= + +LGPL, see COPYING file for details. 
+ + diff --git a/deps/zeroconf-0.17.6.dist-info/INSTALLER b/deps/zeroconf-0.17.6.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/deps/zeroconf-0.17.6.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deps/zeroconf-0.17.6.dist-info/METADATA b/deps/zeroconf-0.17.6.dist-info/METADATA new file mode 100644 index 00000000..7e2758b2 --- /dev/null +++ b/deps/zeroconf-0.17.6.dist-info/METADATA @@ -0,0 +1,342 @@ +Metadata-Version: 2.0 +Name: zeroconf +Version: 0.17.6 +Summary: Pure Python Multicast DNS Service Discovery Library (Bonjour/Avahi compatible) +Home-page: https://github.com/jstasiak/python-zeroconf +Author: Paul Scott-Murphy, William McBrine, Jakub Stasiak +Author-email: UNKNOWN +License: LGPL +Keywords: Bonjour,Avahi,Zeroconf,Multicast DNS,Service Discovery,mDNS +Platform: unix +Platform: linux +Platform: osx +Classifier: Development Status :: 3 - Alpha +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2) +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Topic :: Software Development :: Libraries +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: enum-compat +Requires-Dist: netifaces +Requires-Dist: six + +python-zeroconf +=============== + +.. 
image:: https://travis-ci.org/jstasiak/python-zeroconf.svg?branch=master + :target: https://travis-ci.org/jstasiak/python-zeroconf + +.. image:: https://img.shields.io/pypi/v/zeroconf.svg + :target: https://pypi.python.org/pypi/zeroconf + +.. image:: https://img.shields.io/coveralls/jstasiak/python-zeroconf.svg + :target: https://coveralls.io/r/jstasiak/python-zeroconf + + +This is fork of pyzeroconf, Multicast DNS Service Discovery for Python, +originally by Paul Scott-Murphy (https://github.com/paulsm/pyzeroconf), +modified by William McBrine (https://github.com/wmcbrine/pyzeroconf). + +The original William McBrine's fork note:: + + This fork is used in all of my TiVo-related projects: HME for Python + (and therefore HME/VLC), Network Remote, Remote Proxy, and pyTivo. + Before this, I was tracking the changes for zeroconf.py in three + separate repos. I figured I should have an authoritative source. + + Although I make changes based on my experience with TiVos, I expect that + they're generally applicable. This version also includes patches found + on the now-defunct (?) Launchpad repo of pyzeroconf, and elsewhere + around the net -- not always well-documented, sorry. + +Compatible with: + +* Bonjour +* Avahi + +Compared to some other Zeroconf/Bonjour/Avahi Python packages, python-zeroconf: + +* isn't tied to Bonjour or Avahi +* doesn't use D-Bus +* doesn't force you to use particular event loop or Twisted +* is pip-installable +* has PyPI distribution + +Python compatibility +-------------------- + +* CPython 2.6, 2.7, 3.3+ +* PyPy 2.2+ (possibly 1.9-2.1 as well) +* PyPy3 2.4+ + +Versioning +---------- + +This project's versions follow the following pattern: MAJOR.MINOR.PATCH. + +* MAJOR version has been 0 so far +* MINOR version is incremented on backward incompatible changes +* PATCH version is incremented on backward compatible changes + +Status +------ + +There are some people using this package. 
I don't actively use it and as such +any help I can offer with regard to any issues is very limited. + + +How to get python-zeroconf? +=========================== + +* PyPI page https://pypi.python.org/pypi/zeroconf +* GitHub project https://github.com/jstasiak/python-zeroconf + +The easiest way to install python-zeroconf is using pip:: + + pip install zeroconf + + + +How do I use it? +================ + +Here's an example: + +.. code-block:: python + + from six.moves import input + from zeroconf import ServiceBrowser, Zeroconf + + + class MyListener(object): + + def remove_service(self, zeroconf, type, name): + print("Service %s removed" % (name,)) + + def add_service(self, zeroconf, type, name): + info = zeroconf.get_service_info(type, name) + print("Service %s added, service info: %s" % (name, info)) + + + zeroconf = Zeroconf() + listener = MyListener() + browser = ServiceBrowser(zeroconf, "_http._tcp.local.", listener) + try: + input("Press enter to exit...\n\n") + finally: + zeroconf.close() + +.. note:: + + Discovery and service registration use *all* available network interfaces by default. + If you want to customize that you need to specify ``interfaces`` argument when + constructing ``Zeroconf`` object (see the code for details). + +If you don't know the name of the service you need to browse for, try: + +.. code-block:: python + + from zeroconf import ZeroconfServiceTypes + print('\n'.join(ZeroconfServiceTypes.find())) + +See examples directory for more. 
+ +Changelog +========= + +0.17.6 +------ + +* Many improvements to address race conditions and exceptions during ZC() + startup and shutdown, thanks to: morpav, veawor, justingiorgi, herczy, + stephenrauch +* Added more test coverage: strahlex, stephenrauch +* Stephen Rauch contributed: + + - Speed up browser startup + - Add ZeroconfServiceTypes() query class to discover all advertised service types + - Add full validation for service names, types and subtypes + - Fix for subtype browsing + - Fix DNSHInfo support + +0.17.5 +------ + +* Fixed OpenBSD compatibility, thanks to Alessio Sergi +* Fixed race condition on ServiceBrowser startup, thanks to gbiddison +* Fixed installation on some Python 3 systems, thanks to Per Sandström +* Fixed "size change during iteration" bug on Python 3, thanks to gbiddison + +0.17.4 +------ + +* Fixed support for Linux kernel versions < 3.9 (thanks to Giovanni Harting + and Luckydonald, GitHub pull request #26) + +0.17.3 +------ + +* Fixed DNSText repr on Python 3 (it'd crash when the text was longer than + 10 bytes), thanks to Paulus Schoutsen for the patch, GitHub pull request #24 + +0.17.2 +------ + +* Fixed installation on Python 3.4.3+ (was failing because of enum34 dependency + which fails to install on 3.4.3+, changed to depend on enum-compat instead; + thanks to Michael Brennan for the original patch, GitHub pull request #22) + +0.17.1 +------ + +* Fixed EADDRNOTAVAIL when attempting to use dummy network interfaces on Windows, + thanks to daid + +0.17.0 +------ + +* Added some Python dependencies so it's not zero-dependencies anymore +* Improved exception handling (it'll be quieter now) +* Messages are listened to and sent using all available network interfaces + by default (configurable); thanks to Marcus Müller +* Started using logging more freely +* Fixed a bug with binary strings as property values being converted to False + (https://github.com/jstasiak/python-zeroconf/pull/10); thanks to Dr. 
Seuss +* Added new ``ServiceBrowser`` event handler interface (see the examples) +* PyPy3 now officially supported +* Fixed ServiceInfo repr on Python 3, thanks to Yordan Miladinov + +0.16.0 +------ + +* Set up Python logging and started using it +* Cleaned up code style (includes migrating from camel case to snake case) + +0.15.1 +------ + +* Fixed handling closed socket (GitHub #4) + +0.15 +---- + +* Forked by Jakub Stasiak +* Made Python 3 compatible +* Added setup script, made installable by pip and uploaded to PyPI +* Set up Travis build +* Reformatted the code and moved files around +* Stopped catching BaseException in several places, that could hide errors +* Marked threads as daemonic, they won't keep application alive now + +0.14 +---- + +* Fix for SOL_IP undefined on some systems - thanks Mike Erdely. +* Cleaned up examples. +* Lowercased module name. + +0.13 +---- + +* Various minor changes; see git for details. +* No longer compatible with Python 2.2. Only tested with 2.5-2.7. +* Fork by William McBrine. + +0.12 +---- + +* allow selection of binding interface +* typo fix - Thanks A. M. Kuchlingi +* removed all use of word 'Rendezvous' - this is an API change + +0.11 +---- + +* correction to comments for addListener method +* support for new record types seen from OS X + - IPv6 address + - hostinfo + +* ignore unknown DNS record types +* fixes to name decoding +* works alongside other processes using port 5353 (e.g. 
on Mac OS X) +* tested against Mac OS X 10.3.2's mDNSResponder +* corrections to removal of list entries for service browser + +0.10 +---- + +* Jonathon Paisley contributed these corrections: + + - always multicast replies, even when query is unicast + - correct a pointer encoding problem + - can now write records in any order + - traceback shown on failure + - better TXT record parsing + - server is now separate from name + - can cancel a service browser + +* modified some unit tests to accommodate these changes + +0.09 +---- + +* remove all records on service unregistration +* fix DOS security problem with readName + +0.08 +---- + +* changed licensing to LGPL + +0.07 +---- + +* faster shutdown on engine +* pointer encoding of outgoing names +* ServiceBrowser now works +* new unit tests + +0.06 +---- +* small improvements with unit tests +* added defined exception types +* new style objects +* fixed hostname/interface problem +* fixed socket timeout problem +* fixed add_service_listener() typo bug +* using select() for socket reads +* tested on Debian unstable with Python 2.2.2 + +0.05 +---- + +* ensure case insensitivty on domain names +* support for unicast DNS queries + +0.04 +---- + +* added some unit tests +* added __ne__ adjuncts where required +* ensure names end in '.local.' +* timeout on receiving socket for clean shutdown + + +License +======= + +LGPL, see COPYING file for details. 
+ + diff --git a/deps/zeroconf-0.17.6.dist-info/RECORD b/deps/zeroconf-0.17.6.dist-info/RECORD new file mode 100644 index 00000000..05f516dc --- /dev/null +++ b/deps/zeroconf-0.17.6.dist-info/RECORD @@ -0,0 +1,10 @@ +zeroconf.py,sha256=5HCJquradMRqWpwREnCJDyCtQRFpvauxcT2l0O9T0cg,62509 +zeroconf-0.17.6.dist-info/DESCRIPTION.rst,sha256=f94iqUTdBctpaZZfaUgRbXhAYHulolXoqFsGsTvaZQY,8159 +zeroconf-0.17.6.dist-info/METADATA,sha256=U9bpg7bW0GUgTw5rCO4Y_4zQrkLb7Kf2szNW1IRb_z8,9533 +zeroconf-0.17.6.dist-info/RECORD,, +zeroconf-0.17.6.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +zeroconf-0.17.6.dist-info/metadata.json,sha256=RuMiXO6RSn1f1Tu1CxISyBre7g4AKWdFVqFNs2_UWqo,1437 +zeroconf-0.17.6.dist-info/pbr.json,sha256=6YIrYDsheNOX2fcy6S_Jw7xMl65fgtWkooyLbEQLGjE,46 +zeroconf-0.17.6.dist-info/top_level.txt,sha256=G_yoNgGm6QMZZpH139yjBdEQpn-jCn0EN5Zvy0kJuII,9 +zeroconf-0.17.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +__pycache__/zeroconf.cpython-34.pyc,, diff --git a/deps/zeroconf-0.17.6.dist-info/WHEEL b/deps/zeroconf-0.17.6.dist-info/WHEEL new file mode 100644 index 00000000..8b6dd1b5 --- /dev/null +++ b/deps/zeroconf-0.17.6.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/deps/zeroconf-0.17.6.dist-info/metadata.json b/deps/zeroconf-0.17.6.dist-info/metadata.json new file mode 100644 index 00000000..4a054e6c --- /dev/null +++ b/deps/zeroconf-0.17.6.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Libraries", "Programming Language :: Python", "Programming Language :: Python :: 
2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.details": {"contacts": [{"name": "Paul Scott-Murphy, William McBrine, Jakub Stasiak", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/jstasiak/python-zeroconf"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "keywords": ["Bonjour", "Avahi", "Zeroconf", "Multicast", "DNS", "Service", "Discovery", "mDNS"], "license": "LGPL", "metadata_version": "2.0", "name": "zeroconf", "platform": "unix", "run_requires": [{"requires": ["enum-compat", "netifaces", "six"]}], "summary": "Pure Python Multicast DNS Service Discovery Library (Bonjour/Avahi compatible)", "version": "0.17.6"} \ No newline at end of file diff --git a/deps/zeroconf-0.17.6.dist-info/pbr.json b/deps/zeroconf-0.17.6.dist-info/pbr.json new file mode 100644 index 00000000..f2cc4e20 --- /dev/null +++ b/deps/zeroconf-0.17.6.dist-info/pbr.json @@ -0,0 +1 @@ +{"is_release": true, "git_version": "0b9093d"} \ No newline at end of file diff --git a/deps/zeroconf-0.17.6.dist-info/top_level.txt b/deps/zeroconf-0.17.6.dist-info/top_level.txt new file mode 100644 index 00000000..947c9c52 --- /dev/null +++ b/deps/zeroconf-0.17.6.dist-info/top_level.txt @@ -0,0 +1 @@ +zeroconf diff --git a/deps/zeroconf.py b/deps/zeroconf.py new file mode 100644 index 00000000..d1211f7c --- /dev/null +++ b/deps/zeroconf.py @@ -0,0 +1,1877 @@ +from __future__ import ( + absolute_import, division, print_function, unicode_literals) + +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service 
Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + +import enum +import errno +import logging +import re +import select +import socket +import struct +import threading +import time +from functools import reduce + +import netifaces +from six import binary_type, indexbytes, int2byte, iteritems, text_type +from six.moves import xrange + +__author__ = 'Paul Scott-Murphy, William McBrine' +__maintainer__ = 'Jakub Stasiak ' +__version__ = '0.17.6' +__license__ = 'LGPL' + + +try: + NullHandler = logging.NullHandler +except AttributeError: + # Python 2.6 fallback + class NullHandler(logging.Handler): + + def emit(self, record): + pass + +__all__ = [ + "__version__", + "Zeroconf", "ServiceInfo", "ServiceBrowser", + "Error", "InterfaceChoice", "ServiceStateChange", +] + + +log = logging.getLogger(__name__) +log.addHandler(NullHandler()) + +if log.level == logging.NOTSET: + log.setLevel(logging.WARN) + +# Some timing constants + +_UNREGISTER_TIME = 125 +_CHECK_TIME = 175 +_REGISTER_TIME = 225 +_LISTENER_TIME = 200 +_BROWSER_TIME = 500 + +# Some DNS constants + +_MDNS_ADDR = '224.0.0.251' +_MDNS_PORT = 5353 +_DNS_PORT = 53 +_DNS_TTL = 60 * 60 # one hour default TTL + +_MAX_MSG_TYPICAL = 1460 # unused +_MAX_MSG_ABSOLUTE = 8972 + +_FLAGS_QR_MASK = 0x8000 # query response mask 
+_FLAGS_QR_QUERY = 0x0000 # query +_FLAGS_QR_RESPONSE = 0x8000 # response + +_FLAGS_AA = 0x0400 # Authorative answer +_FLAGS_TC = 0x0200 # Truncated +_FLAGS_RD = 0x0100 # Recursion desired +_FLAGS_RA = 0x8000 # Recursion available + +_FLAGS_Z = 0x0040 # Zero +_FLAGS_AD = 0x0020 # Authentic data +_FLAGS_CD = 0x0010 # Checking disabled + +_CLASS_IN = 1 +_CLASS_CS = 2 +_CLASS_CH = 3 +_CLASS_HS = 4 +_CLASS_NONE = 254 +_CLASS_ANY = 255 +_CLASS_MASK = 0x7FFF +_CLASS_UNIQUE = 0x8000 + +_TYPE_A = 1 +_TYPE_NS = 2 +_TYPE_MD = 3 +_TYPE_MF = 4 +_TYPE_CNAME = 5 +_TYPE_SOA = 6 +_TYPE_MB = 7 +_TYPE_MG = 8 +_TYPE_MR = 9 +_TYPE_NULL = 10 +_TYPE_WKS = 11 +_TYPE_PTR = 12 +_TYPE_HINFO = 13 +_TYPE_MINFO = 14 +_TYPE_MX = 15 +_TYPE_TXT = 16 +_TYPE_AAAA = 28 +_TYPE_SRV = 33 +_TYPE_ANY = 255 + +# Mapping constants to names + +_CLASSES = {_CLASS_IN: "in", + _CLASS_CS: "cs", + _CLASS_CH: "ch", + _CLASS_HS: "hs", + _CLASS_NONE: "none", + _CLASS_ANY: "any"} + +_TYPES = {_TYPE_A: "a", + _TYPE_NS: "ns", + _TYPE_MD: "md", + _TYPE_MF: "mf", + _TYPE_CNAME: "cname", + _TYPE_SOA: "soa", + _TYPE_MB: "mb", + _TYPE_MG: "mg", + _TYPE_MR: "mr", + _TYPE_NULL: "null", + _TYPE_WKS: "wks", + _TYPE_PTR: "ptr", + _TYPE_HINFO: "hinfo", + _TYPE_MINFO: "minfo", + _TYPE_MX: "mx", + _TYPE_TXT: "txt", + _TYPE_AAAA: "quada", + _TYPE_SRV: "srv", + _TYPE_ANY: "any"} + +_HAS_A_TO_Z = re.compile(r'[A-Za-z]') +_HAS_ONLY_A_TO_Z_NUM_HYPHEN = re.compile(r'^[A-Za-z0-9\-]+$') + +# utility functions + + +def current_time_millis(): + """Current system time in milliseconds""" + return time.time() * 1000 + + +def service_type_name(type_): + """ + Validate a fully qualified service name, instance or subtype. [rfc6763] + + Returns fully qualified service name. + + Domain names used by mDNS-SD take the following forms: + + . <_tcp|_udp> . local. + . . <_tcp|_udp> . local. + ._sub . . <_tcp|_udp> . local. + + 1) must end with 'local.' + + This is true because we are implementing mDNS and since the 'm' means + multi-cast, the 'local.' 
domain is mandatory. + + 2) local is preceded with either '_udp.' or '_tcp.' + + 3) service name precedes <_tcp|_udp> + + The rules for Service Names [RFC6335] state that they may be no more + than fifteen characters long (not counting the mandatory underscore), + consisting of only letters, digits, and hyphens, must begin and end + with a letter or digit, must not contain consecutive hyphens, and + must contain at least one letter. + + The instance name and sub type may be up to 63 bytes. + + :param type_: Type, SubType or service name to validate + :return: fully qualified service name (eg: _http._tcp.local.) + """ + if not (type_.endswith('._tcp.local.') or type_.endswith('._udp.local.')): + raise BadTypeInNameException( + "Type must end with '._tcp.local.' or '._udp.local.'") + + if type_.startswith('.'): + raise BadTypeInNameException("Type must not start with '.'") + + remaining = type_[:-len('._tcp.local.')].split('.') + name = remaining.pop() + if not name: + raise BadTypeInNameException("No Service name found") + + if name[0] != '_': + raise BadTypeInNameException("Service name must start with '_'") + + # remove leading underscore + name = name[1:] + + if len(name) > 15: + raise BadTypeInNameException("Service name must be <= 15 bytes") + + if '--' in name: + raise BadTypeInNameException("Service name must not contain '--'") + + if '-' in (name[0], name[-1]): + raise BadTypeInNameException( + "Service name may not start or end with '-'") + + if not _HAS_A_TO_Z.search(name): + raise BadTypeInNameException( + "Service name must contain at least one letter (eg: 'A-Z')") + + if not _HAS_ONLY_A_TO_Z_NUM_HYPHEN.search(name): + raise BadTypeInNameException( + "Service name must contain only these characters: " + "A-Z, a-z, 0-9, hyphen ('-')") + + if remaining and remaining[-1] == '_sub': + remaining.pop() + if len(remaining) == 0: + raise BadTypeInNameException( + "_sub requires a subtype name") + + if len(remaining) > 1: + raise BadTypeInNameException( + 
"Unexpected characters '%s.'" % '.'.join(remaining[1:])) + + if remaining: + length = len(remaining[0].encode('utf-8')) + if length > 63: + raise BadTypeInNameException("Too long: '%s'" % remaining[0]) + + return '_' + name + type_[-len('._tcp.local.'):] + + +# Exceptions + + +class Error(Exception): + pass + + +class NonLocalNameException(Exception): + pass + + +class NonUniqueNameException(Exception): + pass + + +class NamePartTooLongException(Exception): + pass + + +class AbstractMethodException(Exception): + pass + + +class BadTypeInNameException(Exception): + pass + +# implementation classes + + +class DNSEntry(object): + + """A DNS entry""" + + def __init__(self, name, type_, class_): + self.key = name.lower() + self.name = name + self.type = type_ + self.class_ = class_ & _CLASS_MASK + self.unique = (class_ & _CLASS_UNIQUE) != 0 + + def __eq__(self, other): + """Equality test on name, type, and class""" + return (isinstance(other, DNSEntry) and + self.name == other.name and + self.type == other.type and + self.class_ == other.class_) + + def __ne__(self, other): + """Non-equality test""" + return not self.__eq__(other) + + @staticmethod + def get_class_(class_): + """Class accessor""" + return _CLASSES.get(class_, "?(%s)" % class_) + + @staticmethod + def get_type(t): + """Type accessor""" + return _TYPES.get(t, "?(%s)" % t) + + def to_string(self, hdr, other): + """String representation with additional information""" + result = "%s[%s,%s" % (hdr, self.get_type(self.type), + self.get_class_(self.class_)) + if self.unique: + result += "-unique," + else: + result += "," + result += self.name + if other is not None: + result += ",%s]" % other + else: + result += "]" + return result + + +class DNSQuestion(DNSEntry): + + """A DNS question entry""" + + def __init__(self, name, type_, class_): + DNSEntry.__init__(self, name, type_, class_) + + def answered_by(self, rec): + """Returns true if the question is answered by the record""" + return (self.class_ == 
rec.class_ and + (self.type == rec.type or self.type == _TYPE_ANY) and + self.name == rec.name) + + def __repr__(self): + """String representation""" + return DNSEntry.to_string(self, "question", None) + + +class DNSRecord(DNSEntry): + + """A DNS record - like a DNS entry, but has a TTL""" + + def __init__(self, name, type_, class_, ttl): + DNSEntry.__init__(self, name, type_, class_) + self.ttl = ttl + self.created = current_time_millis() + + def __eq__(self, other): + """Tests equality as per DNSRecord""" + return isinstance(other, DNSRecord) and DNSEntry.__eq__(self, other) + + def suppressed_by(self, msg): + """Returns true if any answer in a message can suffice for the + information held in this record.""" + for record in msg.answers: + if self.suppressed_by_answer(record): + return True + return False + + def suppressed_by_answer(self, other): + """Returns true if another record has same name, type and class, + and if its TTL is at least half of this record's.""" + return self == other and other.ttl > (self.ttl / 2) + + def get_expiration_time(self, percent): + """Returns the time at which this record will have expired + by a certain percentage.""" + return self.created + (percent * self.ttl * 10) + + def get_remaining_ttl(self, now): + """Returns the remaining TTL in seconds.""" + return max(0, (self.get_expiration_time(100) - now) / 1000.0) + + def is_expired(self, now): + """Returns true if this record has expired.""" + return self.get_expiration_time(100) <= now + + def is_stale(self, now): + """Returns true if this record is at least half way expired.""" + return self.get_expiration_time(50) <= now + + def reset_ttl(self, other): + """Sets this record's TTL and created time to that of + another record.""" + self.created = other.created + self.ttl = other.ttl + + def write(self, out): + """Abstract method""" + raise AbstractMethodException + + def to_string(self, other): + """String representation with additional information""" + arg = "%s/%s,%s" % ( + 
self.ttl, self.get_remaining_ttl(current_time_millis()), other) + return DNSEntry.to_string(self, "record", arg) + + +class DNSAddress(DNSRecord): + + """A DNS address record""" + + def __init__(self, name, type_, class_, ttl, address): + DNSRecord.__init__(self, name, type_, class_, ttl) + self.address = address + + def write(self, out): + """Used in constructing an outgoing packet""" + out.write_string(self.address) + + def __eq__(self, other): + """Tests equality on address""" + return isinstance(other, DNSAddress) and self.address == other.address + + def __repr__(self): + """String representation""" + try: + return socket.inet_ntoa(self.address) + except Exception as e: # TODO stop catching all Exceptions + log.exception('Unknown error, possibly benign: %r', e) + return self.address + + +class DNSHinfo(DNSRecord): + + """A DNS host information record""" + + def __init__(self, name, type_, class_, ttl, cpu, os): + DNSRecord.__init__(self, name, type_, class_, ttl) + try: + self.cpu = cpu.decode('utf-8') + except AttributeError: + self.cpu = cpu + try: + self.os = os.decode('utf-8') + except AttributeError: + self.os = os + + def write(self, out): + """Used in constructing an outgoing packet""" + out.write_character_string(self.cpu.encode('utf-8')) + out.write_character_string(self.os.encode('utf-8')) + + def __eq__(self, other): + """Tests equality on cpu and os""" + return (isinstance(other, DNSHinfo) and + self.cpu == other.cpu and self.os == other.os) + + def __repr__(self): + """String representation""" + return self.cpu + " " + self.os + + +class DNSPointer(DNSRecord): + + """A DNS pointer record""" + + def __init__(self, name, type_, class_, ttl, alias): + DNSRecord.__init__(self, name, type_, class_, ttl) + self.alias = alias + + def write(self, out): + """Used in constructing an outgoing packet""" + out.write_name(self.alias) + + def __eq__(self, other): + """Tests equality on alias""" + return isinstance(other, DNSPointer) and self.alias == other.alias 
    def __repr__(self):
        """String representation"""
        return self.to_string(self.alias)


class DNSText(DNSRecord):

    """A DNS text (TXT) record; ``text`` is the raw rdata bytes."""

    def __init__(self, name, type_, class_, ttl, text):
        # Only raw bytes (or None) are accepted; decoding of key=value
        # pairs happens elsewhere (ServiceInfo._set_text).
        assert isinstance(text, (bytes, type(None)))
        DNSRecord.__init__(self, name, type_, class_, ttl)
        self.text = text

    def write(self, out):
        """Used in constructing an outgoing packet"""
        out.write_string(self.text)

    def __eq__(self, other):
        """Tests equality on text"""
        return isinstance(other, DNSText) and self.text == other.text

    def __repr__(self):
        """String representation (text truncated to 7 bytes if long)."""
        if len(self.text) > 10:
            return self.to_string(self.text[:7]) + "..."
        else:
            return self.to_string(self.text)


class DNSService(DNSRecord):

    """A DNS service (SRV) record: priority, weight, port and the
    target host name (``server``)."""

    def __init__(self, name, type_, class_, ttl,
                 priority, weight, port, server):
        DNSRecord.__init__(self, name, type_, class_, ttl)
        self.priority = priority
        self.weight = weight
        self.port = port
        self.server = server

    def write(self, out):
        """Used in constructing an outgoing packet"""
        # SRV rdata layout: priority, weight, port (16-bit each), target.
        out.write_short(self.priority)
        out.write_short(self.weight)
        out.write_short(self.port)
        out.write_name(self.server)

    def __eq__(self, other):
        """Tests equality on priority, weight, port and server"""
        return (isinstance(other, DNSService) and
                self.priority == other.priority and
                self.weight == other.weight and
                self.port == other.port and
                self.server == other.server)

    def __repr__(self):
        """String representation"""
        return self.to_string("%s:%s" % (self.server, self.port))


class DNSIncoming(object):

    """Object representation of an incoming DNS packet.

    Parses the wire-format bytes eagerly in the constructor; the
    questions land in ``self.questions`` and all other sections
    (answers, authorities, additionals) are flattened into
    ``self.answers``.  ``self.offset`` is the running read cursor into
    ``self.data``.
    """

    def __init__(self, data):
        """Constructor from string holding bytes of packet"""
        self.offset = 0
        self.data = data
        self.questions = []
        self.answers = []
        self.id = 0
        self.flags = 0
        self.num_questions = 0
        self.num_answers = 0
        self.num_authorities = 0
        self.num_additionals = 0

        self.read_header()
        self.read_questions()
        self.read_others()

    def unpack(self, format_):
        """Unpack ``format_`` at the cursor and advance it; returns the
        full struct.unpack tuple."""
        length = struct.calcsize(format_)
        info = struct.unpack(
            format_, self.data[self.offset:self.offset + length])
        self.offset += length
        return info

    def read_header(self):
        """Reads header portion of packet: id, flags and the four
        section counts, as six 16-bit big-endian fields."""
        (self.id, self.flags, self.num_questions, self.num_answers,
         self.num_authorities, self.num_additionals) = self.unpack(b'!6H')

    def read_questions(self):
        """Reads questions section of packet"""
        # NOTE: xrange/indexbytes/text_type presumably come from this
        # module's py2/py3 compat imports (six) — not visible in this chunk.
        for i in xrange(self.num_questions):
            name = self.read_name()
            type_, class_ = self.unpack(b'!HH')

            question = DNSQuestion(name, type_, class_)
            self.questions.append(question)

    def read_int(self):
        """Reads an integer from the packet"""
        return self.unpack(b'!I')[0]

    def read_character_string(self):
        """Reads a length-prefixed character string from the packet."""
        length = indexbytes(self.data, self.offset)
        self.offset += 1
        return self.read_string(length)

    def read_string(self, length):
        """Reads a string of a given length from the packet"""
        info = self.data[self.offset:self.offset + length]
        self.offset += length
        return info

    def read_unsigned_short(self):
        """Reads an unsigned short from the packet"""
        return self.unpack(b'!H')[0]

    def read_others(self):
        """Reads the answers, authorities and additionals section of the
        packet"""
        n = self.num_answers + self.num_authorities + self.num_additionals
        for i in xrange(n):
            domain = self.read_name()
            # Record header: type, class (16-bit), ttl (parsed as a
            # *signed* 32-bit 'i' — kept as-is), rdata length.
            type_, class_, ttl, length = self.unpack(b'!HHiH')

            rec = None
            if type_ == _TYPE_A:
                rec = DNSAddress(
                    domain, type_, class_, ttl, self.read_string(4))
            elif type_ == _TYPE_CNAME or type_ == _TYPE_PTR:
                rec = DNSPointer(
                    domain, type_, class_, ttl, self.read_name())
            elif type_ == _TYPE_TXT:
                rec = DNSText(
                    domain, type_, class_, ttl, self.read_string(length))
            elif type_ == _TYPE_SRV:
                rec = DNSService(
                    domain, type_, class_, ttl,
                    self.read_unsigned_short(), self.read_unsigned_short(),
                    self.read_unsigned_short(), self.read_name())
            elif type_ == _TYPE_HINFO:
                rec = DNSHinfo(
                    domain, type_, class_, ttl,
                    self.read_character_string(), self.read_character_string())
            elif type_ == _TYPE_AAAA:
                rec = DNSAddress(
                    domain, type_, class_, ttl, self.read_string(16))
            else:
                # Try to ignore types we don't know about
                # Skip the payload for the resource record so the next
                # records can be parsed correctly
                self.offset += length

            if rec is not None:
                self.answers.append(rec)

    def is_query(self):
        """Returns true if this is a query"""
        return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY

    def is_response(self):
        """Returns true if this is a response"""
        return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE

    def read_utf(self, offset, length):
        """Reads a UTF-8 string of a given length from the packet"""
        return text_type(self.data[offset:offset + length], 'utf-8', 'replace')

    def read_name(self):
        """Reads a domain name from the packet.

        Handles RFC 1035 name compression: a length byte with the top
        two bits set (0xC0) is a 14-bit pointer back into the packet.
        ``next_`` remembers where to resume after the first pointer;
        ``first`` guards against circular pointer chains.
        """
        result = ''
        off = self.offset
        next_ = -1
        first = off

        while True:
            length = indexbytes(self.data, off)
            off += 1
            if length == 0:
                break  # root label terminates the name
            t = length & 0xC0
            if t == 0x00:
                # Plain label: <length> bytes of UTF-8.
                result = ''.join((result, self.read_utf(off, length) + '.'))
                off += length
            elif t == 0xC0:
                if next_ < 0:
                    next_ = off + 1
                off = ((length & 0x3F) << 8) | indexbytes(self.data, off)
                if off >= first:
                    # TODO raise more specific exception
                    raise Exception("Bad domain name (circular) at %s" % (off,))
                first = off
            else:
                # 0x40/0x80 label types are not supported.
                # TODO raise more specific exception
                raise Exception("Bad domain name at %s" % (off,))

        if next_ >= 0:
            self.offset = next_
        else:
            self.offset = off

        return result


class DNSOutgoing(object):

    """Object representation of an outgoing DNS packet.

    ``data`` is a list of byte chunks joined on packet(); ``size``
    tracks the byte count including the 12-byte header reserved up
    front; ``names`` maps already-written names to their offsets for
    compression pointers.
    """

    def __init__(self, flags, multicast=True):
        self.finished = False
        self.id = 0
        self.multicast = multicast
        self.flags = flags
        self.names = {}
        self.data = []
        # Start at 12 to account for the header written in packet().
        self.size = 12

        self.questions = []
        self.answers = []
        self.authorities = []
        self.additionals = []

    def add_question(self, record):
        """Adds a question"""
        self.questions.append(record)

    def add_answer(self, inp, record):
        """Adds an answer, unless an answer already present in the
        incoming message ``inp`` makes it redundant."""
        if not record.suppressed_by(inp):
            self.add_answer_at_time(record, 0)

    def add_answer_at_time(self, record, now):
        """Adds an answer if it does not expire by a certain time
        (now == 0 means 'no expiry check')."""
        if record is not None:
            if now == 0 or not record.is_expired(now):
                self.answers.append((record, now))

    def add_authorative_answer(self, record):
        """Adds an authoritative answer.

        NOTE(review): method name misspells 'authoritative' but is part
        of the public API — do not rename.
        """
        self.authorities.append(record)

    def add_additional_answer(self, record):
        """Adds an additional answer"""
        self.additionals.append(record)

    def pack(self, format_, value):
        """Append a struct-packed value and grow ``size`` accordingly."""
        self.data.append(struct.pack(format_, value))
        self.size += struct.calcsize(format_)

    def write_byte(self, value):
        """Writes a single byte to the packet"""
        self.pack(b'!c', int2byte(value))

    def insert_short(self, index, value):
        """Inserts an unsigned short in a certain position in the packet
        (used for length fields and the header, written after the fact)."""
        self.data.insert(index, struct.pack(b'!H', value))
        self.size += 2

    def write_short(self, value):
        """Writes an unsigned short to the packet"""
        self.pack(b'!H', value)

    def write_int(self, value):
        """Writes an unsigned integer to the packet"""
        self.pack(b'!I', int(value))

    def write_string(self, value):
        """Writes raw bytes to the packet (no length prefix)."""
        assert isinstance(value, bytes)
        self.data.append(value)
        self.size += len(value)

    def write_utf(self, s):
        """Writes a length-prefixed UTF-8 string (one DNS label) to the
        packet.

        NOTE(review): bound is ``> 64`` though DNS labels max out at 63
        bytes — matches upstream; confirm before tightening.
        """
        utfstr = s.encode('utf-8')
        length = len(utfstr)
        if length > 64:
            raise NamePartTooLongException
        self.write_byte(length)
        self.write_string(utfstr)

    def write_character_string(self, value):
        # Length-prefixed character string (e.g. HINFO fields).
        # NOTE(review): bound is ``> 256`` though the one-byte length
        # prefix caps at 255 — matches upstream; confirm before changing.
        assert isinstance(value, bytes)
        length = len(value)
        if length > 256:
            raise NamePartTooLongException
        self.write_byte(length)
        self.write_string(value)

    def write_name(self, name):
        """Writes a domain name to the packet, emitting an RFC 1035
        compression pointer if the name was already written."""

        if name in self.names:
            # Find existing instance of this name in packet
            #
            index = self.names[name]

            # An index was found, so write a pointer to it
            # (two bytes: 0xC0 | high 6 bits, then low 8 bits).
            self.write_byte((index >> 8) | 0xC0)
            self.write_byte(index & 0xFF)
        else:
            # No record of this name already, so write it
            # out as normal, recording the location of the name
            # for future pointers to it.
            #
            self.names[name] = self.size
            parts = name.split('.')
            if parts[-1] == '':
                parts = parts[:-1]
            for part in parts:
                self.write_utf(part)
            self.write_byte(0)

    def write_question(self, question):
        """Writes a question to the packet"""
        self.write_name(question.name)
        self.write_short(question.type)
        self.write_short(question.class_)

    def write_record(self, record, now):
        """Writes a record (answer, authoritative answer, additional) to
        the packet.

        The rdata length is only known after the record serializes
        itself, so it is inserted afterwards at ``index``.
        """
        self.write_name(record.name)
        self.write_short(record.type)
        if record.unique and self.multicast:
            # Set the mDNS cache-flush bit on multicast responses.
            self.write_short(record.class_ | _CLASS_UNIQUE)
        else:
            self.write_short(record.class_)
        if now == 0:
            self.write_int(record.ttl)
        else:
            self.write_int(record.get_remaining_ttl(now))
        index = len(self.data)
        # Adjust size for the short we will write before this record
        #
        self.size += 2
        record.write(self)
        self.size -= 2

        length = len(b''.join(self.data[index:]))
        self.insert_short(index, length)  # Here is the short we adjusted for

    def packet(self):
        """Returns a string containing the packet's bytes

        No further parts should be added to the packet once this
        is done."""
        if not self.finished:
            self.finished = True
            for question in self.questions:
                self.write_question(question)
            for answer, time_ in self.answers:
                self.write_record(answer, time_)
            for authority in self.authorities:
                self.write_record(authority, 0)
            for additional in self.additionals:
self.write_record(additional, 0) + + self.insert_short(0, len(self.additionals)) + self.insert_short(0, len(self.authorities)) + self.insert_short(0, len(self.answers)) + self.insert_short(0, len(self.questions)) + self.insert_short(0, self.flags) + if self.multicast: + self.insert_short(0, 0) + else: + self.insert_short(0, self.id) + return b''.join(self.data) + + +class DNSCache(object): + + """A cache of DNS entries""" + + def __init__(self): + self.cache = {} + + def add(self, entry): + """Adds an entry""" + self.cache.setdefault(entry.key, []).append(entry) + + def remove(self, entry): + """Removes an entry""" + try: + list_ = self.cache[entry.key] + list_.remove(entry) + except (KeyError, ValueError): + pass + + def get(self, entry): + """Gets an entry by key. Will return None if there is no + matching entry.""" + try: + list_ = self.cache[entry.key] + for cached_entry in list_: + if entry.__eq__(cached_entry): + return cached_entry + except (KeyError, ValueError): + return None + + def get_by_details(self, name, type_, class_): + """Gets an entry by details. Will return None if there is + no matching entry.""" + entry = DNSEntry(name, type_, class_) + return self.get(entry) + + def entries_with_name(self, name): + """Returns a list of entries whose key matches the name.""" + try: + return self.cache[name] + except KeyError: + return [] + + def entries(self): + """Returns a list of all entries""" + if not self.cache: + return [] + else: + # avoid size change during iteration by copying the cache + values = list(self.cache.values()) + return reduce(lambda a, b: a + b, values) + + +class Engine(threading.Thread): + + """An engine wraps read access to sockets, allowing objects that + need to receive data from sockets to be called back when the + sockets are ready. + + A reader needs a handle_read() method, which is called when the socket + it is interested in is ready for reading. + + Writers are not implemented here, because we only send short + packets. 
+ """ + + def __init__(self, zc): + threading.Thread.__init__(self, name='zeroconf-Engine') + self.daemon = True + self.zc = zc + self.readers = {} # maps socket to reader + self.timeout = 5 + self.condition = threading.Condition() + self.start() + + def run(self): + while not self.zc.done: + with self.condition: + rs = self.readers.keys() + if len(rs) == 0: + # No sockets to manage, but we wait for the timeout + # or addition of a socket + self.condition.wait(self.timeout) + + if len(rs) != 0: + try: + rr, wr, er = select.select(rs, [], [], self.timeout) + if not self.zc.done: + for socket_ in rr: + reader = self.readers.get(socket_) + if reader: + reader.handle_read(socket_) + + except socket.error as e: + # If the socket was closed by another thread, during + # shutdown, ignore it and exit + if e.errno != socket.EBADF or not self.zc.done: + raise + + def add_reader(self, reader, socket_): + with self.condition: + self.readers[socket_] = reader + self.condition.notify() + + def del_reader(self, socket_): + with self.condition: + del self.readers[socket_] + self.condition.notify() + + +class Listener(object): + + """A Listener is used by this module to listen on the multicast + group to which DNS messages are sent, allowing the implementation + to cache information as it arrives. 
+ + It requires registration with an Engine object in order to have + the read() method called when a socket is available for reading.""" + + def __init__(self, zc): + self.zc = zc + self.data = None + + def handle_read(self, socket_): + data, (addr, port) = socket_.recvfrom(_MAX_MSG_ABSOLUTE) + log.debug('Received %r from %r:%r', data, addr, port) + + self.data = data + msg = DNSIncoming(data) + if msg.is_query(): + # Always multicast responses + # + if port == _MDNS_PORT: + self.zc.handle_query(msg, _MDNS_ADDR, _MDNS_PORT) + # If it's not a multicast query, reply via unicast + # and multicast + # + elif port == _DNS_PORT: + self.zc.handle_query(msg, addr, port) + self.zc.handle_query(msg, _MDNS_ADDR, _MDNS_PORT) + else: + self.zc.handle_response(msg) + + +class Reaper(threading.Thread): + + """A Reaper is used by this module to remove cache entries that + have expired.""" + + def __init__(self, zc): + threading.Thread.__init__(self, name='zeroconf-Reaper') + self.daemon = True + self.zc = zc + self.start() + + def run(self): + while True: + self.zc.wait(10 * 1000) + if self.zc.done: + return + now = current_time_millis() + for record in self.zc.cache.entries(): + if record.is_expired(now): + self.zc.update_record(now, record) + self.zc.cache.remove(record) + + +class Signal(object): + def __init__(self): + self._handlers = [] + + def fire(self, **kwargs): + for h in list(self._handlers): + h(**kwargs) + + @property + def registration_interface(self): + return SignalRegistrationInterface(self._handlers) + + +class SignalRegistrationInterface(object): + + def __init__(self, handlers): + self._handlers = handlers + + def register_handler(self, handler): + self._handlers.append(handler) + return self + + def unregister_handler(self, handler): + self._handlers.remove(handler) + return self + + +class ServiceBrowser(threading.Thread): + + """Used to browse for a service of a specific type. 
+ + The listener object will have its add_service() and + remove_service() methods called when this browser + discovers changes in the services availability.""" + + def __init__(self, zc, type_, handlers=None, listener=None): + """Creates a browser for a specific type""" + assert handlers or listener, 'You need to specify at least one handler' + if not type_.endswith(service_type_name(type_)): + raise BadTypeInNameException + threading.Thread.__init__( + self, name='zeroconf-ServiceBrowser_' + type_) + self.daemon = True + self.zc = zc + self.type = type_ + self.services = {} + self.next_time = current_time_millis() + self.delay = _BROWSER_TIME + self._handlers_to_call = [] + + self._service_state_changed = Signal() + + self.done = False + + if hasattr(handlers, 'add_service'): + listener = handlers + handlers = None + + handlers = handlers or [] + + if listener: + def on_change(zeroconf, service_type, name, state_change): + args = (zeroconf, service_type, name) + if state_change is ServiceStateChange.Added: + listener.add_service(*args) + elif state_change is ServiceStateChange.Removed: + listener.remove_service(*args) + else: + raise NotImplementedError(state_change) + handlers.append(on_change) + + for h in handlers: + self.service_state_changed.register_handler(h) + + self.start() + + @property + def service_state_changed(self): + return self._service_state_changed.registration_interface + + def update_record(self, zc, now, record): + """Callback invoked by Zeroconf when new information arrives. 
+ + Updates information required by browser in the Zeroconf cache.""" + + def enqueue_callback(state_change, name): + self._handlers_to_call.append( + lambda zeroconf: self._service_state_changed.fire( + zeroconf=zeroconf, + service_type=self.type, + name=name, + state_change=state_change, + )) + + if record.type == _TYPE_PTR and record.name == self.type: + expired = record.is_expired(now) + service_key = record.alias.lower() + try: + old_record = self.services[service_key] + except KeyError: + if not expired: + self.services[service_key] = record + enqueue_callback(ServiceStateChange.Added, record.alias) + else: + if not expired: + old_record.reset_ttl(record) + else: + del self.services[service_key] + enqueue_callback(ServiceStateChange.Removed, record.alias) + return + + expires = record.get_expiration_time(75) + if expires < self.next_time: + self.next_time = expires + + def cancel(self): + self.done = True + self.zc.remove_listener(self) + self.join() + + def run(self): + self.zc.add_listener(self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)) + + while True: + now = current_time_millis() + if len(self._handlers_to_call) == 0 and self.next_time > now: + self.zc.wait(self.next_time - now) + if self.zc.done or self.done: + return + now = current_time_millis() + + if self.next_time <= now: + out = DNSOutgoing(_FLAGS_QR_QUERY) + out.add_question(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)) + for record in self.services.values(): + if not record.is_expired(now): + out.add_answer_at_time(record, now) + self.zc.send(out) + self.next_time = now + self.delay + self.delay = min(20 * 1000, self.delay * 2) + + if len(self._handlers_to_call) > 0 and not self.zc.done: + handler = self._handlers_to_call.pop(0) + handler(self.zc) + + +class ServiceInfo(object): + + """Service information""" + + def __init__(self, type_, name, address=None, port=None, weight=0, + priority=0, properties=None, server=None): + """Create a service description. 
+ + type_: fully qualified service type name + name: fully qualified service name + address: IP address as unsigned short, network byte order + port: port that the service runs on + weight: weight of the service + priority: priority of the service + properties: dictionary of properties (or a string holding the + bytes for the text field) + server: fully qualified name for service host (defaults to name)""" + + if not type_.endswith(service_type_name(name)): + raise BadTypeInNameException + self.type = type_ + self.name = name + self.address = address + self.port = port + self.weight = weight + self.priority = priority + if server: + self.server = server + else: + self.server = name + self._properties = {} + self._set_properties(properties) + + @property + def properties(self): + return self._properties + + def _set_properties(self, properties): + """Sets properties and text of this info from a dictionary""" + if isinstance(properties, dict): + self._properties = properties + list_ = [] + result = b'' + for key, value in iteritems(properties): + if isinstance(key, text_type): + key = key.encode('utf-8') + + if value is None: + suffix = b'' + elif isinstance(value, text_type): + suffix = value.encode('utf-8') + elif isinstance(value, binary_type): + suffix = value + elif isinstance(value, int): + if value: + suffix = b'true' + else: + suffix = b'false' + else: + suffix = b'' + list_.append(b'='.join((key, suffix))) + for item in list_: + result = b''.join((result, int2byte(len(item)), item)) + self.text = result + else: + self.text = properties + + def _set_text(self, text): + """Sets properties and text given a text field""" + self.text = text + result = {} + end = len(text) + index = 0 + strs = [] + while index < end: + length = indexbytes(text, index) + index += 1 + strs.append(text[index:index + length]) + index += length + + for s in strs: + parts = s.split(b'=', 1) + try: + key, value = parts + except ValueError: + # No equals sign at all + key = s + value = 
False + else: + if value == b'true': + value = True + elif value == b'false' or not value: + value = False + + # Only update non-existent properties + if key and result.get(key) is None: + result[key] = value + + self._properties = result + + def get_name(self): + """Name accessor""" + if self.type is not None and self.name.endswith("." + self.type): + return self.name[:len(self.name) - len(self.type) - 1] + return self.name + + def update_record(self, zc, now, record): + """Updates service information from a DNS record""" + if record is not None and not record.is_expired(now): + if record.type == _TYPE_A: + # if record.name == self.name: + if record.name == self.server: + self.address = record.address + elif record.type == _TYPE_SRV: + if record.name == self.name: + self.server = record.server + self.port = record.port + self.weight = record.weight + self.priority = record.priority + # self.address = None + self.update_record( + zc, now, zc.cache.get_by_details( + self.server, _TYPE_A, _CLASS_IN)) + elif record.type == _TYPE_TXT: + if record.name == self.name: + self._set_text(record.text) + + def request(self, zc, timeout): + """Returns true if the service could be discovered on the + network, and updates this object with details discovered. 
+ """ + now = current_time_millis() + delay = _LISTENER_TIME + next_ = now + delay + last = now + timeout + + record_types_for_check_cache = [ + (_TYPE_SRV, _CLASS_IN), + (_TYPE_TXT, _CLASS_IN), + ] + if self.server is not None: + record_types_for_check_cache.append((_TYPE_A, _CLASS_IN)) + for record_type in record_types_for_check_cache: + cached = zc.cache.get_by_details(self.name, *record_type) + if cached: + self.update_record(zc, now, cached) + + if None not in (self.server, self.address, self.text): + return True + + try: + zc.add_listener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)) + while None in (self.server, self.address, self.text): + if last <= now: + return False + if next_ <= now: + out = DNSOutgoing(_FLAGS_QR_QUERY) + out.add_question( + DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN)) + out.add_answer_at_time( + zc.cache.get_by_details( + self.name, _TYPE_SRV, _CLASS_IN), now) + + out.add_question( + DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN)) + out.add_answer_at_time( + zc.cache.get_by_details( + self.name, _TYPE_TXT, _CLASS_IN), now) + + if self.server is not None: + out.add_question( + DNSQuestion(self.server, _TYPE_A, _CLASS_IN)) + out.add_answer_at_time( + zc.cache.get_by_details( + self.server, _TYPE_A, _CLASS_IN), now) + zc.send(out) + next_ = now + delay + delay *= 2 + + zc.wait(min(next_, last) - now) + now = current_time_millis() + finally: + zc.remove_listener(self) + + return True + + def __eq__(self, other): + """Tests equality of service name""" + if isinstance(other, ServiceInfo): + return other.name == self.name + return False + + def __ne__(self, other): + """Non-equality test""" + return not self.__eq__(other) + + def __repr__(self): + """String representation""" + return '%s(%s)' % ( + type(self).__name__, + ', '.join( + '%s=%r' % (name, getattr(self, name)) + for name in ( + 'type', 'name', 'address', 'port', 'weight', 'priority', + 'server', 'properties', + ) + ) + ) + + +class ZeroconfServiceTypes(object): + """ + Return 
all of the advertised services on any local networks + """ + def __init__(self): + self.found_services = set() + + def add_service(self, zc, type_, name): + self.found_services.add(name) + + def remove_service(self, zc, type_, name): + pass + + @classmethod + def find(cls, zc=None, timeout=5): + """ + Return all of the advertised services on any local networks. + + :param zc: Zeroconf() instance. Pass in if already have an + instance running or if non-default interfaces are needed + :param timeout: seconds to wait for any responses + :return: tuple of service type strings + """ + local_zc = zc or Zeroconf() + listener = cls() + browser = ServiceBrowser( + local_zc, '_services._dns-sd._udp.local.', listener=listener) + + # wait for responses + time.sleep(timeout) + + # close down anything we opened + if zc is None: + local_zc.close() + else: + browser.cancel() + + return tuple(sorted(listener.found_services)) + + +@enum.unique +class InterfaceChoice(enum.Enum): + Default = 1 + All = 2 + + +@enum.unique +class ServiceStateChange(enum.Enum): + Added = 1 + Removed = 2 + + +HOST_ONLY_NETWORK_MASK = '255.255.255.255' + + +def get_all_addresses(address_family): + return list(set( + addr['addr'] + for iface in netifaces.interfaces() + for addr in netifaces.ifaddresses(iface).get(address_family, []) + if addr.get('netmask') != HOST_ONLY_NETWORK_MASK + )) + + +def normalize_interface_choice(choice, address_family): + if choice is InterfaceChoice.Default: + choice = ['0.0.0.0'] + elif choice is InterfaceChoice.All: + choice = get_all_addresses(address_family) + return choice + + +def new_socket(): + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + # SO_REUSEADDR should be equivalent to SO_REUSEPORT for + # multicast UDP sockets (p 731, "TCP/IP Illustrated, + # Volume 2"), but some BSD-derived systems require + # SO_REUSEPORT to be specified explicity. 
Also, not all + # versions of Python have SO_REUSEPORT available. + # Catch OSError and socket.error for kernel versions <3.9 because lacking + # SO_REUSEPORT support. + try: + reuseport = socket.SO_REUSEPORT + except AttributeError: + pass + else: + try: + s.setsockopt(socket.SOL_SOCKET, reuseport, 1) + except (OSError, socket.error) as err: + # OSError on python 3, socket.error on python 2 + if not err.errno == errno.ENOPROTOOPT: + raise + + # OpenBSD needs the ttl and loop values for the IP_MULTICAST_TTL and + # IP_MULTICAST_LOOP socket options as an unsigned char. + ttl = struct.pack(b'B', 255) + s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) + loop = struct.pack(b'B', 1) + s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, loop) + + s.bind(('', _MDNS_PORT)) + return s + + +def get_errno(e): + assert isinstance(e, socket.error) + return e.args[0] + + +class Zeroconf(object): + + """Implementation of Zeroconf Multicast DNS Service Discovery + + Supports registration, unregistration, queries and browsing. + """ + + def __init__( + self, + interfaces=InterfaceChoice.All, + ): + """Creates an instance of the Zeroconf class, establishing + multicast communications, listening and reaping threads. 
+ + :type interfaces: :class:`InterfaceChoice` or sequence of ip addresses + """ + # hook for threads + self._GLOBAL_DONE = False + + self._listen_socket = new_socket() + interfaces = normalize_interface_choice(interfaces, socket.AF_INET) + + self._respond_sockets = [] + + for i in interfaces: + log.debug('Adding %r to multicast group', i) + try: + self._listen_socket.setsockopt( + socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, + socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(i)) + except socket.error as e: + if get_errno(e) == errno.EADDRINUSE: + log.info( + 'Address in use when adding %s to multicast group, ' + 'it is expected to happen on some systems', i, + ) + elif get_errno(e) == errno.EADDRNOTAVAIL: + log.info( + 'Address not available when adding %s to multicast ' + 'group, it is expected to happen on some systems', i, + ) + continue + else: + raise + + respond_socket = new_socket() + respond_socket.setsockopt( + socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(i)) + + self._respond_sockets.append(respond_socket) + + self.listeners = [] + self.browsers = {} + self.services = {} + self.servicetypes = {} + + self.cache = DNSCache() + + self.condition = threading.Condition() + + self.engine = Engine(self) + self.listener = Listener(self) + self.engine.add_reader(self.listener, self._listen_socket) + self.reaper = Reaper(self) + + self.debug = None + + @property + def done(self): + return self._GLOBAL_DONE + + def wait(self, timeout): + """Calling thread waits for a given number of milliseconds or + until notified.""" + with self.condition: + self.condition.wait(timeout / 1000.0) + + def notify_all(self): + """Notifies all waiting threads""" + with self.condition: + self.condition.notify_all() + + def get_service_info(self, type_, name, timeout=3000): + """Returns network's service information for a particular + name and type, or None if no service matches by the timeout, + which defaults to 3 seconds.""" + info = ServiceInfo(type_, name) + if 
info.request(self, timeout): + return info + return None + + def add_service_listener(self, type_, listener): + """Adds a listener for a particular service type. This object + will then have its update_record method called when information + arrives for that type.""" + self.remove_service_listener(listener) + self.browsers[listener] = ServiceBrowser(self, type_, listener) + + def remove_service_listener(self, listener): + """Removes a listener from the set that is currently listening.""" + if listener in self.browsers: + self.browsers[listener].cancel() + del self.browsers[listener] + + def remove_all_service_listeners(self): + """Removes a listener from the set that is currently listening.""" + for listener in [k for k in self.browsers]: + self.remove_service_listener(listener) + + def register_service(self, info, ttl=_DNS_TTL): + """Registers service information to the network with a default TTL + of 60 seconds. Zeroconf will then respond to requests for + information for that service. 
The name of the service may be + changed if needed to make it unique on the network.""" + self.check_service(info) + self.services[info.name.lower()] = info + if info.type in self.servicetypes: + self.servicetypes[info.type] += 1 + else: + self.servicetypes[info.type] = 1 + now = current_time_millis() + next_time = now + i = 0 + while i < 3: + if now < next_time: + self.wait(next_time - now) + now = current_time_millis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.add_answer_at_time( + DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, ttl, info.name), 0) + out.add_answer_at_time( + DNSService(info.name, _TYPE_SRV, _CLASS_IN, + ttl, info.priority, info.weight, info.port, + info.server), 0) + + out.add_answer_at_time( + DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), 0) + if info.address: + out.add_answer_at_time( + DNSAddress(info.server, _TYPE_A, _CLASS_IN, + ttl, info.address), 0) + self.send(out) + i += 1 + next_time += _REGISTER_TIME + + def unregister_service(self, info): + """Unregister a service.""" + try: + del self.services[info.name.lower()] + if self.servicetypes[info.type] > 1: + self.servicetypes[info.type] -= 1 + else: + del self.servicetypes[info.type] + except Exception as e: # TODO stop catching all Exceptions + log.exception('Unknown error, possibly benign: %r', e) + now = current_time_millis() + next_time = now + i = 0 + while i < 3: + if now < next_time: + self.wait(next_time - now) + now = current_time_millis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.add_answer_at_time( + DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0) + out.add_answer_at_time( + DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, + info.priority, info.weight, info.port, info.name), 0) + out.add_answer_at_time( + DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0) + + if info.address: + out.add_answer_at_time( + DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, + info.address), 0) + self.send(out) + i += 1 + 
next_time += _UNREGISTER_TIME + + def unregister_all_services(self): + """Unregister all registered services.""" + if len(self.services) > 0: + now = current_time_millis() + next_time = now + i = 0 + while i < 3: + if now < next_time: + self.wait(next_time - now) + now = current_time_millis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + for info in self.services.values(): + out.add_answer_at_time(DNSPointer( + info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0) + out.add_answer_at_time(DNSService( + info.name, _TYPE_SRV, _CLASS_IN, 0, + info.priority, info.weight, info.port, info.server), 0) + out.add_answer_at_time(DNSText( + info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0) + if info.address: + out.add_answer_at_time(DNSAddress( + info.server, _TYPE_A, _CLASS_IN, 0, + info.address), 0) + self.send(out) + i += 1 + next_time += _UNREGISTER_TIME + + def check_service(self, info): + """Checks the network for a unique service name, modifying the + ServiceInfo passed in if it is not unique.""" + now = current_time_millis() + next_time = now + i = 0 + while i < 3: + for record in self.cache.entries_with_name(info.type): + if (record.type == _TYPE_PTR and + not record.is_expired(now) and + record.alias == info.name): + if info.name.find('.') < 0: + info.name = '%s.[%s:%s].%s' % ( + info.name, info.address, info.port, info.type) + + self.check_service(info) + return + raise NonUniqueNameException + if now < next_time: + self.wait(next_time - now) + now = current_time_millis() + continue + out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA) + self.debug = out + out.add_question(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN)) + out.add_authorative_answer(DNSPointer( + info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name)) + self.send(out) + i += 1 + next_time += _CHECK_TIME + + def add_listener(self, listener, question): + """Adds a listener for a given question. 
The listener will have + its update_record method called when information is available to + answer the question.""" + now = current_time_millis() + self.listeners.append(listener) + if question is not None: + for record in self.cache.entries_with_name(question.name): + if question.answered_by(record) and not record.is_expired(now): + listener.update_record(self, now, record) + self.notify_all() + + def remove_listener(self, listener): + """Removes a listener.""" + try: + self.listeners.remove(listener) + self.notify_all() + except Exception as e: # TODO stop catching all Exceptions + log.exception('Unknown error, possibly benign: %r', e) + + def update_record(self, now, rec): + """Used to notify listeners of new information that has updated + a record.""" + for listener in self.listeners: + listener.update_record(self, now, rec) + self.notify_all() + + def handle_response(self, msg): + """Deal with incoming response packets. All answers + are held in the cache, and listeners are notified.""" + now = current_time_millis() + for record in msg.answers: + expired = record.is_expired(now) + if record in self.cache.entries(): + if expired: + self.cache.remove(record) + else: + entry = self.cache.get(record) + if entry is not None: + entry.reset_ttl(record) + else: + self.cache.add(record) + + for record in msg.answers: + self.update_record(now, record) + + def handle_query(self, msg, addr, port): + """Deal with incoming query packets. 
Provides a response if + possible.""" + out = None + + # Support unicast client responses + # + if port != _MDNS_PORT: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, False) + for question in msg.questions: + out.add_question(question) + + for question in msg.questions: + if question.type == _TYPE_PTR: + if question.name == "_services._dns-sd._udp.local.": + for stype in self.servicetypes.keys(): + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.add_answer(msg, DNSPointer( + "_services._dns-sd._udp.local.", _TYPE_PTR, + _CLASS_IN, _DNS_TTL, stype)) + for service in self.services.values(): + if question.name == service.type: + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.add_answer(msg, DNSPointer( + service.type, _TYPE_PTR, + _CLASS_IN, _DNS_TTL, service.name)) + else: + try: + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + + # Answer A record queries for any service addresses we know + if question.type in (_TYPE_A, _TYPE_ANY): + for service in self.services.values(): + if service.server == question.name.lower(): + out.add_answer(msg, DNSAddress( + question.name, _TYPE_A, + _CLASS_IN | _CLASS_UNIQUE, + _DNS_TTL, service.address)) + + service = self.services.get(question.name.lower(), None) + if not service: + continue + + if question.type in (_TYPE_SRV, _TYPE_ANY): + out.add_answer(msg, DNSService( + question.name, _TYPE_SRV, _CLASS_IN | _CLASS_UNIQUE, + _DNS_TTL, service.priority, service.weight, + service.port, service.server)) + if question.type in (_TYPE_TXT, _TYPE_ANY): + out.add_answer(msg, DNSText( + question.name, _TYPE_TXT, _CLASS_IN | _CLASS_UNIQUE, + _DNS_TTL, service.text)) + if question.type == _TYPE_SRV: + out.add_additional_answer(DNSAddress( + service.server, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, + _DNS_TTL, service.address)) + except Exception as e: # TODO stop catching all Exceptions + log.exception('Unknown error, possibly benign: %r', e) + + if out is not None and 
out.answers: + out.id = msg.id + self.send(out, addr, port) + + def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT): + """Sends an outgoing packet.""" + packet = out.packet() + log.debug('Sending %r as %r...', out, packet) + for s in self._respond_sockets: + if self._GLOBAL_DONE: + return + bytes_sent = s.sendto(packet, 0, (addr, port)) + if bytes_sent != len(packet): + raise Error( + 'Should not happen, sent %d out of %d bytes' % ( + bytes_sent, len(packet))) + + def close(self): + """Ends the background threads, and prevent this instance from + servicing further queries.""" + if not self._GLOBAL_DONE: + self._GLOBAL_DONE = True + # remove service listeners + self.remove_all_service_listeners() + self.unregister_all_services() + + # shutdown recv socket and thread + self.engine.del_reader(self._listen_socket) + self._listen_socket.close() + self.engine.join() + + # shutdown the rest + self.notify_all() + self.reaper.join() + for s in self._respond_sockets: + s.close()