# coding: utf-8
# Copyright 2009 Alexandre Fiori
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Low level connection to Mongo."""

import logging
import struct
import asyncio

from asyncio_mongo.exceptions import ConnectionLostError
import asyncio_mongo._bson as bson
from asyncio_mongo.log import logger

_ONE = b"\x01\x00\x00\x00"
_ZERO = b"\x00\x00\x00\x00"


class _MongoQuery(object):
    def __init__(self, id, collection, limit):
        self.id = id
        self.limit = limit
        self.collection = collection
        self.documents = []
        self.future = asyncio.Future()


class MongoProtocol(asyncio.Protocol):
    def __init__(self):
        self.__id = 0
        self.__buffer = b""
        self.__queries = {}
        self.__datalen = None
        self.__response = 0
        self.__waiting_header = True
        self._pipelined_calls = set()  # Set of all the pipelined calls.

        self.transport = None
        self._is_connected = False

    def connection_made(self, transport):
        self.transport = transport
        self._is_connected = True
        logger.log(logging.INFO, 'Mongo connection made with %r', transport)

    def connection_lost(self, exc):
        self._is_connected = False
        self.transport = None

        # Raise exception on all waiting futures.
        for query in self.__queries.values():
            query.future.set_exception(ConnectionLostError(exc))

        logger.log(logging.INFO, 'Mongo connection lost')

    def data_received(self, data):
        while self.__waiting_header:
            self.__buffer += data
            if len(self.__buffer) < 16:
                break

            # Got the full header: 16 bytes (or more).
            header, extra = self.__buffer[:16], self.__buffer[16:]
            self.__buffer = b""
            self.__waiting_header = False
            datalen, request, response, operation = struct.unpack("<iiii", header)
            self.__datalen = datalen - 16
            self.__response = response
            if extra:
                self.data_received(extra)
            break
        else:
            # Accumulate the message body until __datalen bytes have arrived.
            if self.__datalen is not None:
                data, extra = data[:self.__datalen], data[self.__datalen:]
                self.__datalen -= len(data)
            else:
                extra = b""

            self.__buffer += data
            if self.__datalen == 0:
                self.message_received(self.__response, self.__buffer)
                self.__datalen = None
                self.__waiting_header = True
                self.__buffer = b""
                if extra:
                    self.data_received(extra)

    def message_received(self, request_id, packet):
        # OP_REPLY body: responseFlags (int32), cursorID (int64),
        # startingFrom (int32), numberReturned (int32), then the documents.
        response_flag, cursor_id, start, length = struct.unpack("<iqii", packet[:20])
        if response_flag == 1:
            self.query_failure(request_id, cursor_id, response_flag, packet[20:])
            return
        self.query_success(request_id, cursor_id, bson.decode_all(packet[20:]))

    def send_message(self, operation, collection, message, query_opts=_ZERO):
        # Frame the message with the standard header: messageLength,
        # requestID, responseTo and opCode, all little-endian int32.
        fullname = (collection.encode("utf-8") + b"\x00") if collection else b""
        message = query_opts + fullname + message
        header = struct.pack("<iiii", 16 + len(message), self.__id, 0, operation)
        self.transport.write(header + message)
        self.__id += 1

    def OP_KILL_CURSORS(self, cursors):
        message = struct.pack("<i", len(cursors))
        for cursor_id in cursors:
            message += struct.pack("<q", cursor_id)
        self.send_message(2007, None, message)

    def OP_GET_MORE(self, collection, limit, cursor_id):
        message = struct.pack("<iq", limit, cursor_id)
        self.send_message(2005, collection, message)

    def OP_QUERY(self, collection, spec, skip, limit, fields=None):
        message = struct.pack("<ii", skip, limit) + bson.BSON.encode(spec)
        if fields is not None:
            message += bson.BSON.encode(fields)

        query = _MongoQuery(self.__id, collection, limit)
        self.__queries[self.__id] = query
        self.send_message(2004, collection, message)
        return query.future

    def query_failure(self, request_id, cursor_id, response, raw_error):
        query = self.__queries.pop(request_id, None)
        if query:
            # Hand the raw error payload back to the waiting caller.
            query.future.set_exception(Exception(raw_error))

    def query_success(self, request_id, cursor_id, documents):
        query = self.__queries.pop(request_id, None)
        if query is None:
            return

        query.documents += documents
        if cursor_id:  # More batches to fetch for this cursor.
            next_batch = 0
            if query.limit:
                next_batch = query.limit - len(query.documents)
                # The server should never return more documents than we asked for.
                assert next_batch >= 0, "Unexpected number of documents received!"
                if not next_batch:
                    self.OP_KILL_CURSORS([cursor_id])
                    query.future.set_result(query.documents)
                    return
            self.__queries[self.__id] = query
            self.OP_GET_MORE(query.collection, next_batch, cursor_id)
        else:
            query.future.set_result(query.documents)
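

# Usage sketch: wiring MongoProtocol into an asyncio event loop.  A MongoDB
# server on localhost:27017, the 'test.foo' namespace, and the exact
# OP_QUERY(collection, spec, skip, limit) signature are assumptions here.
if __name__ == "__main__":
    loop = asyncio.get_event_loop()

    # create_connection() yields (transport, protocol); the protocol instance
    # is the object that speaks the Mongo wire protocol.
    transport, proto = loop.run_until_complete(
        loop.create_connection(MongoProtocol, "localhost", 27017))

    # OP_QUERY returns an asyncio.Future that resolves to the full list of
    # documents once every batch of the cursor has been fetched.
    docs = loop.run_until_complete(proto.OP_QUERY("test.foo", {}, 0, 10))
    print(docs)

    transport.close()
    loop.close()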