Best Python code snippet using fMBT_python
curl_httpclient.py
Source:curl_httpclient.py  
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Non-blocking HTTP client implementation using pycurl."""
from __future__ import absolute_import, division, print_function, with_statement

import collections
import functools
import logging
import pycurl  # type: ignore
import threading
import time
from io import BytesIO

from tornado import httputil
from tornado import ioloop
from tornado import stack_context
from tornado.escape import utf8, native_str
from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main

curl_log = logging.getLogger('tornado.curl_httpclient')


class CurlAsyncHTTPClient(AsyncHTTPClient):
    def initialize(self, io_loop, max_clients=10, defaults=None):
        super(CurlAsyncHTTPClient, self).initialize(io_loop, defaults=defaults)
        self._multi = pycurl.CurlMulti()
        self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
        self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
        self._curls = [self._curl_create() for i in range(max_clients)]
        self._free_list = self._curls[:]
        self._requests = collections.deque()
        self._fds = {}
        self._timeout = None
        # libcurl has bugs that sometimes cause it to not report all
        # relevant file descriptors and timeouts to TIMERFUNCTION/
        # SOCKETFUNCTION.  Mitigate the effects of such bugs by
        # forcing a periodic scan of all active requests.
        self._force_timeout_callback = ioloop.PeriodicCallback(
            self._handle_force_timeout, 1000, io_loop=io_loop)
        self._force_timeout_callback.start()
        # Work around a bug in libcurl 7.29.0: Some fields in the curl
        # multi object are initialized lazily, and its destructor will
        # segfault if it is destroyed without having been used.  Add
        # and remove a dummy handle to make sure everything is
        # initialized.
        dummy_curl_handle = pycurl.Curl()
        self._multi.add_handle(dummy_curl_handle)
        self._multi.remove_handle(dummy_curl_handle)

    def close(self):
        self._force_timeout_callback.stop()
        if self._timeout is not None:
            self.io_loop.remove_timeout(self._timeout)
        for curl in self._curls:
            curl.close()
        self._multi.close()
        super(CurlAsyncHTTPClient, self).close()

    def fetch_impl(self, request, callback):
        self._requests.append((request, callback))
        self._process_queue()
        self._set_timeout(0)

    def _handle_socket(self, event, fd, multi, data):
        """Called by libcurl when it wants to change the file descriptors
        it cares about.
        """
        event_map = {
            pycurl.POLL_NONE: ioloop.IOLoop.NONE,
            pycurl.POLL_IN: ioloop.IOLoop.READ,
            pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
            pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE
        }
        if event == pycurl.POLL_REMOVE:
            if fd in self._fds:
                self.io_loop.remove_handler(fd)
                del self._fds[fd]
        else:
            ioloop_event = event_map[event]
            # libcurl sometimes closes a socket and then opens a new
            # one using the same FD without giving us a POLL_NONE in
            # between.  This is a problem with the epoll IOLoop,
            # because the kernel can tell when a socket is closed and
            # removes it from the epoll automatically, causing future
            # update_handler calls to fail.  Since we can't tell when
            # this has happened, always use remove and re-add
            # instead of update.
            if fd in self._fds:
                self.io_loop.remove_handler(fd)
            self.io_loop.add_handler(fd, self._handle_events,
                                     ioloop_event)
            self._fds[fd] = ioloop_event

    def _set_timeout(self, msecs):
        """Called by libcurl to schedule a timeout."""
        if self._timeout is not None:
            self.io_loop.remove_timeout(self._timeout)
        self._timeout = self.io_loop.add_timeout(
            self.io_loop.time() + msecs / 1000.0, self._handle_timeout)

    def _handle_events(self, fd, events):
        """Called by IOLoop when there is activity on one of our
        file descriptors.
        """
        action = 0
        if events & ioloop.IOLoop.READ:
            action |= pycurl.CSELECT_IN
        if events & ioloop.IOLoop.WRITE:
            action |= pycurl.CSELECT_OUT
        while True:
            try:
                ret, num_handles = self._multi.socket_action(fd, action)
            except pycurl.error as e:
                ret = e.args[0]
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
        self._finish_pending_requests()

    def _handle_timeout(self):
        """Called by IOLoop when the requested timeout has passed."""
        with stack_context.NullContext():
            self._timeout = None
            while True:
                try:
                    ret, num_handles = self._multi.socket_action(
                        pycurl.SOCKET_TIMEOUT, 0)
                except pycurl.error as e:
                    ret = e.args[0]
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
            self._finish_pending_requests()
        # In theory, we shouldn't have to do this because curl will
        # call _set_timeout whenever the timeout changes.  However,
        # sometimes after _handle_timeout we will need to reschedule
        # immediately even though nothing has changed from curl's
        # perspective.  This is because when socket_action is
        # called with SOCKET_TIMEOUT, libcurl decides internally which
        # timeouts need to be processed by using a monotonic clock
        # (where available) while tornado uses python's time.time()
        # to decide when timeouts have occurred.  When those clocks
        # disagree on elapsed time (as they will whenever there is an
        # NTP adjustment), tornado might call _handle_timeout before
        # libcurl is ready.  After each timeout, resync the scheduled
        # timeout with libcurl's current state.
        new_timeout = self._multi.timeout()
        if new_timeout >= 0:
            self._set_timeout(new_timeout)

    def _handle_force_timeout(self):
        """Called by IOLoop periodically to ask libcurl to process any
        events it may have forgotten about.
        """
        with stack_context.NullContext():
            while True:
                try:
                    ret, num_handles = self._multi.socket_all()
                except pycurl.error as e:
                    ret = e.args[0]
                if ret != pycurl.E_CALL_MULTI_PERFORM:
                    break
            self._finish_pending_requests()

    def _finish_pending_requests(self):
        """Process any requests that were completed by the last
        call to multi.socket_action.
        """
        while True:
            num_q, ok_list, err_list = self._multi.info_read()
            for curl in ok_list:
                self._finish(curl)
            for curl, errnum, errmsg in err_list:
                self._finish(curl, errnum, errmsg)
            if num_q == 0:
                break
        self._process_queue()

    def _process_queue(self):
        with stack_context.NullContext():
            while True:
                started = 0
                while self._free_list and self._requests:
                    started += 1
                    curl = self._free_list.pop()
                    (request, callback) = self._requests.popleft()
                    curl.info = {
                        "headers": httputil.HTTPHeaders(),
                        "buffer": BytesIO(),
                        "request": request,
                        "callback": callback,
                        "curl_start_time": time.time(),
                    }
                    try:
                        self._curl_setup_request(
                            curl, request, curl.info["buffer"],
                            curl.info["headers"])
                    except Exception as e:
                        # If there was an error in setup, pass it on
                        # to the callback. Note that allowing the
                        # error to escape here will appear to work
                        # most of the time since we are still in the
                        # caller's original stack frame, but when
                        # _process_queue() is called from
                        # _finish_pending_requests the exceptions have
                        # nowhere to go.
                        self._free_list.append(curl)
                        callback(HTTPResponse(
                            request=request,
                            code=599,
                            error=e))
                    else:
                        self._multi.add_handle(curl)
                if not started:
                    break

    def _finish(self, curl, curl_error=None, curl_message=None):
        info = curl.info
        curl.info = None
        self._multi.remove_handle(curl)
        self._free_list.append(curl)
        buffer = info["buffer"]
        if curl_error:
            error = CurlError(curl_error, curl_message)
            code = error.code
            effective_url = None
            buffer.close()
            buffer = None
        else:
            error = None
            code = curl.getinfo(pycurl.HTTP_CODE)
            effective_url = curl.getinfo(pycurl.EFFECTIVE_URL)
            buffer.seek(0)
        # the various curl timings are documented at
        # http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html
        time_info = dict(
            queue=info["curl_start_time"] - info["request"].start_time,
            namelookup=curl.getinfo(pycurl.NAMELOOKUP_TIME),
            connect=curl.getinfo(pycurl.CONNECT_TIME),
            pretransfer=curl.getinfo(pycurl.PRETRANSFER_TIME),
            starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
            total=curl.getinfo(pycurl.TOTAL_TIME),
            redirect=curl.getinfo(pycurl.REDIRECT_TIME),
        )
        try:
            info["callback"](HTTPResponse(
                request=info["request"], code=code, headers=info["headers"],
                buffer=buffer, effective_url=effective_url, error=error,
                reason=info['headers'].get("X-Http-Reason", None),
                request_time=time.time() - info["curl_start_time"],
                time_info=time_info))
        except Exception:
            self.handle_callback_exception(info["callback"])

    def handle_callback_exception(self, callback):
        self.io_loop.handle_callback_exception(callback)

    def _curl_create(self):
        curl = pycurl.Curl()
        if curl_log.isEnabledFor(logging.DEBUG):
            curl.setopt(pycurl.VERBOSE, 1)
            curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
        if hasattr(pycurl, 'PROTOCOLS'):  # PROTOCOLS first appeared in pycurl 7.19.5 (2014-07-12)
            curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
            curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTP | pycurl.PROTO_HTTPS)
        return curl

    def _curl_setup_request(self, curl, request, buffer, headers):
        curl.setopt(pycurl.URL, native_str(request.url))
        # libcurl's magic "Expect: 100-continue" behavior causes delays
        # with servers that don't support it (which include, among others,
        # Google's OpenID endpoint).  Additionally, this behavior has
        # a bug in conjunction with the curl_multi_socket_action API
        # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
        # which increases the delays.  It's more trouble than it's worth,
        # so just turn off the feature (yes, setting Expect: to an empty
        # value is the official way to disable this)
        if "Expect" not in request.headers:
            request.headers["Expect"] = ""
        # libcurl adds Pragma: no-cache by default; disable that too
        if "Pragma" not in request.headers:
            request.headers["Pragma"] = ""
        curl.setopt(pycurl.HTTPHEADER,
                    ["%s: %s" % (native_str(k), native_str(v))
                     for k, v in request.headers.get_all()])
        curl.setopt(pycurl.HEADERFUNCTION,
                    functools.partial(self._curl_header_callback,
                                      headers, request.header_callback))
        if request.streaming_callback:
            def write_function(chunk):
                self.io_loop.add_callback(request.streaming_callback, chunk)
        else:
            write_function = buffer.write
        if bytes is str:  # py2
            curl.setopt(pycurl.WRITEFUNCTION, write_function)
        else:  # py3
            # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
            # a fork/port.  That version has a bug in which it passes unicode
            # strings instead of bytes to the WRITEFUNCTION.  This means that
            # if you use a WRITEFUNCTION (which tornado always does), you cannot
            # download arbitrary binary data.  This needs to be fixed in the
            # ported pycurl package, but in the meantime this lambda will
            # make it work for downloading (utf8) text.
            curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
        curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
        curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
        curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
        curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
        if request.user_agent:
            curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
        else:
            curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
        if request.network_interface:
            curl.setopt(pycurl.INTERFACE, request.network_interface)
        if request.decompress_response:
            curl.setopt(pycurl.ENCODING, "gzip,deflate")
        else:
            curl.setopt(pycurl.ENCODING, "none")
        if request.proxy_host and request.proxy_port:
            curl.setopt(pycurl.PROXY, request.proxy_host)
            curl.setopt(pycurl.PROXYPORT, request.proxy_port)
            if request.proxy_username:
                credentials = '%s:%s' % (request.proxy_username,
                                         request.proxy_password)
                curl.setopt(pycurl.PROXYUSERPWD, credentials)
            if (request.proxy_auth_mode is None or
                    request.proxy_auth_mode == "basic"):
                curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC)
            elif request.proxy_auth_mode == "digest":
                curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST)
            else:
                raise ValueError(
                    "Unsupported proxy_auth_mode %s" % request.proxy_auth_mode)
        else:
            curl.setopt(pycurl.PROXY, '')
            curl.unsetopt(pycurl.PROXYUSERPWD)
        if request.validate_cert:
            curl.setopt(pycurl.SSL_VERIFYPEER, 1)
            curl.setopt(pycurl.SSL_VERIFYHOST, 2)
        else:
            curl.setopt(pycurl.SSL_VERIFYPEER, 0)
            curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        if request.ca_certs is not None:
            curl.setopt(pycurl.CAINFO, request.ca_certs)
        else:
            # There is no way to restore pycurl.CAINFO to its default value
            # (Using unsetopt makes it reject all certificates).
            # I don't see any way to read the default value from python so it
            # can be restored later.  We'll have to just leave CAINFO untouched
            # if no ca_certs file was specified, and require that if any
            # request uses a custom ca_certs file, they all must.
            pass
        if request.allow_ipv6 is False:
            # Curl behaves reasonably when DNS resolution gives an ipv6 address
            # that we can't reach, so allow ipv6 unless the user asks to disable.
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
        else:
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
        # Set the request method through curl's irritating interface which makes
        # up names for almost every single method
        curl_options = {
            "GET": pycurl.HTTPGET,
            "POST": pycurl.POST,
            "PUT": pycurl.UPLOAD,
            "HEAD": pycurl.NOBODY,
        }
        custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
        for o in curl_options.values():
            curl.setopt(o, False)
        if request.method in curl_options:
            curl.unsetopt(pycurl.CUSTOMREQUEST)
            curl.setopt(curl_options[request.method], True)
        elif request.allow_nonstandard_methods or request.method in custom_methods:
            curl.setopt(pycurl.CUSTOMREQUEST, request.method)
        else:
            raise KeyError('unknown method ' + request.method)
        body_expected = request.method in ("POST", "PATCH", "PUT")
        body_present = request.body is not None
        if not request.allow_nonstandard_methods:
            # Some HTTP methods nearly always have bodies while others
            # almost never do. Fail in this case unless the user has
            # opted out of sanity checks with allow_nonstandard_methods.
            if ((body_expected and not body_present) or
                    (body_present and not body_expected)):
                raise ValueError(
                    'Body must %sbe None for method %s (unless '
                    'allow_nonstandard_methods is true)' %
                    ('not ' if body_expected else '', request.method))
        if body_expected or body_present:
            if request.method == "GET":
                # Even with `allow_nonstandard_methods` we disallow
                # GET with a body (because libcurl doesn't allow it
                # unless we use CUSTOMREQUEST). While the spec doesn't
                # forbid clients from sending a body, it arguably
                # disallows the server from doing anything with them.
                raise ValueError('Body must be None for GET request')
            request_buffer = BytesIO(utf8(request.body or ''))

            def ioctl(cmd):
                if cmd == curl.IOCMD_RESTARTREAD:
                    request_buffer.seek(0)
            curl.setopt(pycurl.READFUNCTION, request_buffer.read)
            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
            if request.method == "POST":
                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ''))
            else:
                curl.setopt(pycurl.UPLOAD, True)
                curl.setopt(pycurl.INFILESIZE, len(request.body or ''))
        if request.auth_username is not None:
            userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
            if request.auth_mode is None or request.auth_mode == "basic":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
            elif request.auth_mode == "digest":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
            else:
                raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
            curl.setopt(pycurl.USERPWD, native_str(userpwd))
            curl_log.debug("%s %s (username: %r)", request.method, request.url,
                           request.auth_username)
        else:
            curl.unsetopt(pycurl.USERPWD)
            curl_log.debug("%s %s", request.method, request.url)
        if request.client_cert is not None:
            curl.setopt(pycurl.SSLCERT, request.client_cert)
        if request.client_key is not None:
            curl.setopt(pycurl.SSLKEY, request.client_key)
        if request.ssl_options is not None:
            raise ValueError("ssl_options not supported in curl_httpclient")
        if threading.activeCount() > 1:
            # libcurl/pycurl is not thread-safe by default.  When multiple threads
            # are used, signals should be disabled.  This has the side effect
            # of disabling DNS timeouts in some environments (when libcurl is
            # not linked against ares), so we don't do it when there is only one
            # thread.  Applications that use many short-lived threads may need
            # to set NOSIGNAL manually in a prepare_curl_callback since
            # there may not be any other threads running at the time we call
            # threading.activeCount.
            curl.setopt(pycurl.NOSIGNAL, 1)
        if request.prepare_curl_callback is not None:
            request.prepare_curl_callback(curl)

    def _curl_header_callback(self, headers, header_callback, header_line):
        header_line = native_str(header_line.decode('latin1'))
        if header_callback is not None:
            self.io_loop.add_callback(header_callback, header_line)
        # header_line as returned by curl includes the end-of-line characters.
        # whitespace at the start should be preserved to allow multi-line headers
        header_line = header_line.rstrip()
        if header_line.startswith("HTTP/"):
            headers.clear()
            try:
                (__, __, reason) = httputil.parse_response_start_line(header_line)
                header_line = "X-Http-Reason: %s" % reason
            except httputil.HTTPInputError:
                return
        if not header_line:
            return
        headers.parse_line(header_line)

    def _curl_debug(self, debug_type, debug_msg):
        debug_types = ('I', '<', '>', '<', '>')
        if debug_type == 0:
            curl_log.debug('%s', debug_msg.strip())
        elif debug_type in (1, 2):
            for line in debug_msg.splitlines():
                curl_log.debug('%s %s', debug_types[debug_type], line)
        elif debug_type == 4:
            curl_log.debug('%s %r', debug_types[debug_type], debug_msg)


class CurlError(HTTPError):
    def __init__(self, errno, message):
        HTTPError.__init__(self, 599, message)
        self.errno = errno


if __name__ == "__main__":
    AsyncHTTPClient.configure(CurlAsyncHTTPClient)
...
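The snippet above is Tornado's pycurl-backed client, normally enabled through AsyncHTTPClient.configure as the __main__ block suggests. Below is a minimal usage sketch, assuming Tornado 4.x and pycurl are installed; the URL is only a placeholder.

# Minimal usage sketch (assumes tornado 4.x with pycurl available; URL is a placeholder).
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import IOLoop

# Tell tornado to use the curl-based client instead of simple_httpclient.
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

def handle_response(response):
    # response is a tornado.httpclient.HTTPResponse
    print(response.code, len(response.body or b""))
    IOLoop.current().stop()

http_client = AsyncHTTPClient()
http_client.fetch("http://example.com/", handle_response)
IOLoop.current().start()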
CurlMaker.js
Source:CurlMaker.js
/**
 * Generate `curl` commands to use in data loading.
 * Available methods for customizing the configuration:
 * * contentType
 * * delimiter
 * * c3auth
 * * use78 - Set to true if using 7.8, creating auth token will fail
 * Available methods for making curls:
 * * makeCurl
 * * makeCurls
 * * makeCurlAllFSC
 * * makeCurlAllCanonical
 * @constructor
 * @example
 * //Overwrite default configurations.
 * var curl = new CurlMaker();
 * curl.contentType('text/csv').delimiter(',').c3auth('ExistingAuthToken');
 * @example
 * //Make a curl command
 * var curl = new CurlMaker();
 * curl.makeCurl('CanonicalMyCanonical','MyFileName.csv');
 * @example
 * //Make multiple curl commands
 * var curl = new CurlMaker();
 * curl.makeCurls({
 *   'Canonical1': 'FileName1.csv',
 *   'Canonical2': 'FileName2.csv',
 *   'Canonical3': 'FileName3.csv'
 * });
 */
function CurlMaker(){
  /* If 'new' was not used, use it. Makes sure 'this' refers to instance scope */
  if ( ! (this instanceof CurlMaker) ){
    return new CurlMaker();
  }
  //Some Config Things
  var _tokenMinutes = 240;
  var _contentType = 'text/csv';
  var _delimiter  = ',';
  //Current Context
  var _context = ActionContext.dump();
  var _user = _context.userName;
  var _tenant = _context.tenant;
  var _tag = _context.tag;
  var _host = _context.hostUrl;
  var _use78 = false;
  //Auth Token
  var _c3auth = null;
  /**
   * @function makeCurl
   * Make a curl command for a single canonical and filename.
   * @param {string} canonical Optional. Name of the canonical.
   * Defaults to 'CanonicalName' for easy find/replace.
   * @param {string} fileName Optional. Name of the file.
   * Defaults to 'FileName.csv' for easy find/replace.
   * @returns {string} The curl command.
   */
  var _makeCurl = function(canonical, fileName) {
    canonical = canonical || 'CanonicalName';
    fileName = fileName || 'FileName.csv';
    if ( !_c3auth ) {
      if ( _use78 ) {
        _c3auth = Authenticator.generateC3AuthToken(_user,null,_tokenMinutes); //This version for 7.8
      } else {
        _c3auth = Authenticator.generateC3AuthToken(null,_tokenMinutes); //This version for 7.9
      }
    }
    let url = _curlUrl(canonical, fileName);
    let contentType = 'Content-Type: '+_contentType+';delimiter="'+_delimiter+'"';
    let c = 'curl -v -H \''+contentType+'\' -X PUT --data-binary @./'+fileName
      +' '+url+' --cookie "c3auth='+_c3auth+'"';
    return c;
  }
  /**
   * @function makeCurls
   * Make multiple curl commands based on a dictionary of canonicals and files.
   * @param {Object} canonicalFileName Dictionary of canonical name and filename.
   * For each key, will create a curl for a canonical of that key and filename of its value.
   * @returns {string[]} Array of curl commands.
   */
  var _makeCurls = function(canonicalFileName) {
    canonicalFileName = canonicalFileName || {};
    var canonicals = Object.keys(canonicalFileName);
    return canonicals.map((o)=>_makeCurl(o,canonicalFileName[o]));
  }
  /**
   * @function makeCurlAllFSC
   * Makes a curl command for each FileSourceCollection that
   * was not created by 'authorizer'.
   * @returns {string[]} Array of curl commands.
   */
  var _makeCurlAllFSC = function() {
    let fsc = _allFileSourceCollection() || [];
    return fsc.map((o)=>_makeCurl(o,null));
  }
  /**
   * @function makeCurlAllCanonical
   * Makes a curl command for each canonical of the current package.
   * Assumes a FileSourceCollection exists for each.
   * @returns {string[]} Array of curl commands.
   */
  var _makeCurlAllCanonical = function() {
    let c = _packageCanonicals() || [];
    return c.map((o)=>_makeCurl(o,null));
  }
  /**
   * Make the file import url for a curl command.
   * @private
   * @param {string} canonical Canonical name, defaults to 'CanonicalName'
   * @param {string} fileName File name, defaults to 'FileName.csv'
   */
  var _curlUrl = function(canonical, fileName) {
    canonical = canonical || 'CanonicalName';
    fileName = fileName || 'FileName.csv';
    return _host + '/import/1/' + _tenant + '/' + _tag + '/' + canonical + '/' + fileName;
  }
  /**
   * Get the id of all FileSourceCollection not made by 'authorizer' (default ones).
   * @private
   * @returns {string[]} Array of ids
   */
  var _allFileSourceCollection = function() {
    //Get all FileSourceCollections not made by 'authorizer' (default ones)
    let objs = FileSourceCollection.fetch({filter: 'meta.createdBy!="authorizer"'}).objs || [];
    return objs.map((o)=>o.id);
  }
  /**
   * Gets all canonicals in the current package.
   * @private
   * @returns {string[]} Array of Canonical names
   */
  var _packageCanonicals = function() {
    //Get the root package (deployed package) for the current tag
    let tag = MetadataStore.tag();
    let p = tag.rootPackage();
    p = p.name || null;
    //List all types in this package
    let allTypes = p ? tag.typesByPackage(p) : [];
    //List all 'canonical' types
    let cTypes = tag.typesThatMixin({typeName:'Canonical'});
    //Filter canonicals types down to only ones in current package
    let typeDict = _.indexBy(allTypes,'typeName');
    cTypes = cTypes.filter((f)=>{return typeof typeDict[f.typeName] !== 'undefined'});
    cTypes = cTypes.map((m)=>m.typeName);
    return cTypes;
  }
  /* Make the object that will be returned */
  var curl = {
    contentType: function(_) {
      if (_) { _contentType = _; return this; }
      return _contentType;
    }
    ,delimiter: function(_) {
      if (_) { _delimiter = _; return this; }
      return _delimiter;
    },c3auth: function(_) {
      if (_) { _c3auth = _; return this; }
      return _c3auth;
    },use78: function(_) {
      if (_) { _use78 = _; return this; }
      return _use78;
    }
  }
  //Expose functions that need to be exposed
  curl.makeCurl = _makeCurl;
  curl.makeCurls = _makeCurls;
  curl.makeCurlAllFSC = _makeCurlAllFSC;
  curl.makeCurlAllCanonical = _makeCurlAllCanonical;
  /* Set the returned object's prototype to CurlMaker's prototype
   * All it really does is make instanceof CurlMaker return true */
  curl.__proto__ = this.__proto__;
  //Return the object
  return curl;
...
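CurlMaker assembles one PUT upload command per canonical. For readers who just want the shape of the generated command, here is a rough Python sketch that reproduces the same string format; the host, tenant, tag, token, canonical, and file names are placeholders, not values from a real environment.

# Illustrative sketch only: rebuilds the command format CurlMaker emits.
# All values below are placeholders.
host, tenant, tag = "https://myenv.example.com", "myTenant", "myTag"
token = "ExistingAuthToken"
canonical, file_name = "CanonicalMyCanonical", "MyFileName.csv"

url = "%s/import/1/%s/%s/%s/%s" % (host, tenant, tag, canonical, file_name)
content_type = 'Content-Type: text/csv;delimiter=","'
command = ("curl -v -H '%s' -X PUT --data-binary @./%s %s --cookie \"c3auth=%s\""
           % (content_type, file_name, url, token))
print(command)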
getqos_threads.py
Source:getqos_threads.py
from threading import Thread
from get_qos.models import QoS_datacaiji
from users.models import User
from io import BytesIO
import pycurl
import sys

class qosthread(Thread):
    def __init__(self, threadID, URL, email, price):
        Thread.__init__(self)
        self.threadID = threadID
        self.URL = URL
        self.email = email
        self.price = price

    def run(self):
        getdata(self.URL, self.email, self.price)

def getdata(URL, email, price):
    URL = URL.encode('utf-8')
    curl = pycurl.Curl()  # Create a Curl object
    curl.setopt(pycurl.SSL_VERIFYPEER, 0)
    curl.setopt(pycurl.SSL_VERIFYHOST, 0)
    curl.setopt(pycurl.URL, URL)  # Set the request URL
    curl.setopt(pycurl.ENCODING, 'gzip,deflate')
    curl.setopt(pycurl.CONNECTTIMEOUT, 5)  # Connection timeout, in seconds
    curl.setopt(pycurl.TIMEOUT, 5)  # Request timeout, in seconds
    curl.setopt(pycurl.NOPROGRESS, 1)  # Disable the download progress meter
    curl.setopt(pycurl.FORBID_REUSE, 1)  # Close the connection after the transfer; do not reuse it
    curl.setopt(pycurl.MAXREDIRS, 1)  # Allow at most one HTTP redirect
    curl.setopt(pycurl.DNS_CACHE_TIMEOUT, 30)  # Cache DNS results for 30 seconds
    buff = BytesIO()
    curl.setopt(pycurl.WRITEFUNCTION, buff.write)  # pycurl does not store the response itself, so write it into a BytesIO buffer
    try:
        curl.perform()  # Perform the request
    except Exception as e:
        print("connection error:" + str(e))
        buff.close()
        curl.close()
        sys.exit()
    # RESP0NSE_WORDS = buff.getvalue().decode("utf-8")
    NAMELOOKUP_TIME = curl.getinfo(curl.NAMELOOKUP_TIME)  # DNS resolution time
    CONNECT_TIME = curl.getinfo(curl.CONNECT_TIME) * 1000  # Time to establish the connection, in ms
    PRETRANSFER_TIME = curl.getinfo(curl.PRETRANSFER_TIME) * 1000  # Time from connect until ready to transfer, in ms
    STARTTRANSFER_TIME = curl.getinfo(curl.STARTTRANSFER_TIME) * 1000  # Time from connect until the first byte, in ms
    REDIRECT_TIME = curl.getinfo(curl.REDIRECT_TIME)  # Time spent on redirects
    TOTAL_TIME = curl.getinfo(curl.TOTAL_TIME) * 1000  # Total transfer time, in ms
    HTTP_CODE = curl.getinfo(curl.HTTP_CODE)  # HTTP status code
    SIZE_DOWNLOAD = curl.getinfo(curl.SIZE_DOWNLOAD)  # Size of the downloaded data
    SIZE_UPLOAD = curl.getinfo(curl.SIZE_UPLOAD)  # Size of the uploaded data
    HEADER_SIZE = curl.getinfo(curl.HEADER_SIZE)  # Size of the HTTP headers
    SPEED_DOWNLOAD = curl.getinfo(curl.SPEED_DOWNLOAD)  # Average download speed
    SPEED_UPLOAD = curl.getinfo(curl.SPEED_UPLOAD)  # Average upload speed
    # con = response_dict.get("RESP0NSE_WORDS")
    # print("Response content: %s" % con)
    # if 'HTTP_X_FORWARDED_FOR' in request.META:
    #     ip = request.META['HTTP_X_FORWARDED_FOR']
    # else:
    #     ip = request.META['REMOTE_ADDR']
    # Save the data to the database
    u = User.objects.get(email=email)
    qosdata = QoS_datacaiji(ws_url=URL, http_code=HTTP_CODE, total_time=TOTAL_TIME, size_download=SIZE_DOWNLOAD, namelookup_time=NAMELOOKUP_TIME,
                            connect_time=CONNECT_TIME, pretransfer_time=PRETRANSFER_TIME, starttransfer_time=STARTTRANSFER_TIME, redirect_time=REDIRECT_TIME,
                            size_upload=SIZE_UPLOAD, header_size=HEADER_SIZE, speed_download=SPEED_DOWNLOAD, speed_upload=SPEED_UPLOAD, price=price)
    qosdata.save()
    qosdata.user.add(u)  # cannot access related objects of a many-to-many relation before the instance is saved
    # Print the collected data
    # print("Response content: %s" % RESP0NSE_WORDS)
    # print("HTTP status code: %s" % (HTTP_CODE))
    # print("DNS resolution time: %.2f us" % (NAMELOOKUP_TIME))
    # print("Connect time: %.2f ms" % (CONNECT_TIME))
    # print("Pre-transfer time: %.2f ms" % (PRETRANSFER_TIME))
    # print("Start-transfer time: %.2f ms" % (STARTTRANSFER_TIME))
    # print("Total transfer time: %.2f ms" % (TOTAL_TIME))
    # print("Redirect time: %.2f us" % (REDIRECT_TIME))
    # print("HTTP header size: %d byte" % (HEADER_SIZE))
    # print("Downloaded data size: %d bytes" % (SIZE_DOWNLOAD))
    # print("Uploaded data size: %d bytes" % (SIZE_UPLOAD))
    # print("Average download speed: %d bytes/s" % (SPEED_DOWNLOAD))
    # print("Average upload speed: %d bytes/s" % (SPEED_UPLOAD))
    buff.close()
    curl.close()  # Close the Curl object
...
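This snippet performs a single QoS measurement per call and stores it through Django models. A hedged sketch of how the thread might be launched follows; it assumes the get_qos and users Django apps from the snippet are installed and that a User row with the given email exists, and the URL, email, and price are placeholders.

# Usage sketch (placeholders throughout; requires the Django project above).
from getqos_threads import qosthread

t = qosthread(threadID=1,
              URL="http://example.com/service?wsdl",
              email="user@example.com",
              price=0)
t.start()
t.join()  # getdata() saves one QoS_datacaiji row once the request finishes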
qthreads.py
Source:qthreads.py
from threading import Thread
from get_qos.models import QoS_data
from users.models import User
from get_qos.models import Monitor
from io import BytesIO
import pycurl
import sys
import time

class qthread(Thread):
    def __init__(self, threadID, URL, email, price, endtime, internal):
        Thread.__init__(self)
        self.threadID = threadID
        self.URL = URL
        self.email = email
        self.price = price
        self.endtime = endtime
        self.internal = internal

    def run(self):
        getqos(self.URL, self.email, self.price, self.endtime, self.internal)

def getqos(URL, email, price, endtime, internal):
    time_passed = 0
    URL = URL.encode('utf-8')
    curl = pycurl.Curl()  # Create a Curl object
    curl.setopt(pycurl.SSL_VERIFYPEER, 0)
    curl.setopt(pycurl.SSL_VERIFYHOST, 0)
    curl.setopt(pycurl.URL, URL)  # Set the request URL
    curl.setopt(pycurl.ENCODING, 'gzip,deflate')
    curl.setopt(pycurl.CONNECTTIMEOUT, 10)  # Connection timeout, in seconds
    curl.setopt(pycurl.TIMEOUT, 10)  # Request timeout, in seconds
    curl.setopt(pycurl.NOPROGRESS, 1)  # Disable the download progress meter
    curl.setopt(pycurl.FORBID_REUSE, 1)  # Close the connection after the transfer; do not reuse it
    curl.setopt(pycurl.MAXREDIRS, 1)  # Allow at most one HTTP redirect
    curl.setopt(pycurl.DNS_CACHE_TIMEOUT, 30)  # Cache DNS results for 30 seconds
    buff = BytesIO()
    curl.setopt(pycurl.WRITEFUNCTION, buff.write)  # pycurl does not store the response itself, so write it into a BytesIO buffer
    while(True):
        try:
            curl.perform()  # Perform the request
        except Exception as e:
            print("connection error:" + str(e))
            buff.close()
            curl.close()
            sys.exit()
        # RESP0NSE_WORDS = buff.getvalue().decode("utf-8")
        NAMELOOKUP_TIME = curl.getinfo(curl.NAMELOOKUP_TIME)  # DNS resolution time
        CONNECT_TIME = curl.getinfo(curl.CONNECT_TIME) * 1000  # Time to establish the connection, in ms
        PRETRANSFER_TIME = curl.getinfo(curl.PRETRANSFER_TIME) * 1000  # Time from connect until ready to transfer, in ms
        STARTTRANSFER_TIME = curl.getinfo(curl.STARTTRANSFER_TIME) * 1000  # Time from connect until the first byte, in ms
        REDIRECT_TIME = curl.getinfo(curl.REDIRECT_TIME)  # Time spent on redirects
        TOTAL_TIME = curl.getinfo(curl.TOTAL_TIME) * 1000  # Total transfer time, in ms
        HTTP_CODE = curl.getinfo(curl.HTTP_CODE)  # HTTP status code
        SIZE_DOWNLOAD = curl.getinfo(curl.SIZE_DOWNLOAD)  # Size of the downloaded data
        SIZE_UPLOAD = curl.getinfo(curl.SIZE_UPLOAD)  # Size of the uploaded data
        HEADER_SIZE = curl.getinfo(curl.HEADER_SIZE)  # Size of the HTTP headers
        SPEED_DOWNLOAD = curl.getinfo(curl.SPEED_DOWNLOAD)  # Average download speed
        SPEED_UPLOAD = curl.getinfo(curl.SPEED_UPLOAD)  # Average upload speed
        u = User.objects.get(email=email)
        qosdata = QoS_data(ws_url=URL, http_code=HTTP_CODE, total_time=TOTAL_TIME, size_download=SIZE_DOWNLOAD, namelookup_time=NAMELOOKUP_TIME,
                           connect_time=CONNECT_TIME, pretransfer_time=PRETRANSFER_TIME, starttransfer_time=STARTTRANSFER_TIME, redirect_time=REDIRECT_TIME,
                           size_upload=SIZE_UPLOAD, header_size=HEADER_SIZE, speed_download=SPEED_DOWNLOAD, speed_upload=SPEED_UPLOAD, price=price)
        qosdata.save()
        qosdata.user.add(u)  # cannot access related objects of a many-to-many relation before the instance is saved
        time.sleep(internal)
        time_passed += internal
        if time_passed > endtime:
            break
    buff.close()
    curl.close()  # Close the Curl object
    out = Monitor.objects.get(ws_url=URL)
    out.is_monitor = 0
    out.save()
...
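This variant repeats the measurement in a loop, sleeping `internal` seconds between samples until roughly `endtime` seconds have accumulated, then clears the Monitor flag. A hedged launch sketch, with placeholder values and the same Django model assumptions as above:

# Usage sketch (placeholders throughout; assumes the Django models above and a
# Monitor row whose ws_url matches the URL being polled).
from qthreads import qthread

monitor = qthread(threadID=1,
                  URL="http://example.com/service?wsdl",
                  email="user@example.com",
                  price=0,
                  endtime=600,   # stop after about 600 seconds of accumulated sleeps
                  internal=60)   # sample roughly once per minute
monitor.start()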
