Redis Module of Python Standard Library Series

Keywords: Redis Session Python github

Install Redis

Install

Module GitHub address: https://github.com/WoLpH/redis-py

[root@anshengme ~]# yum -y install redis

Configure Binding IP

[root@anshengme ~]# vim /etc/redis.conf 
bind 0.0.0.0

Start and set boot self-start

[root@anshengme ~]# systemctl start redis
[root@anshengme ~]# systemctl enable redis
Created symlink from /etc/systemd/system/multi-user.target.wants/redis.service to /usr/lib/systemd/system/redis.service.

Inspect

View Port

[root@anshengme ~]# netstat -tlnp | grep "redis"
tcp        0      0 0.0.0.0:6379            0.0.0.0:*               LISTEN      1439/redis-server 0 

Data Writing Test

[root@anshengme ~]# /usr/bin/redis-cli 
127.0.0.1:6379> set url https://blog.ansheng.me
OK
127.0.0.1:6379> get url
"https://blog.ansheng.me"
127.0.0.1:6379> exit

Install redis-py

Install redis-py

pip3 install redis

Or source installation

python setup.py install

Check whether the installation was successful

# If the module imports without error, the installation succeeded
>>> import redis

Introduction and Use

# Import module
>>> import redis
# Connect to Redis Server
>>> conn = redis.Redis(host='192.168.56.100', port=6379)
# Write a piece of data
>>> conn.set('name','ansheng')
True
# Get a piece of data
>>> conn.get('name')
b'ansheng'
>>> conn.get('url')
b'https://blog.ansheng.me'

Connect to Redis using connection pool

Behind the scenes, redis-py uses a connection pool to manage connections to a Redis server. By default, each Redis instance you create will in turn create its own connection pool. You can override this behavior and use an existing connection pool by passing an already created connection pool instance to the connection_pool argument of the Redis class. You may choose to do this in order to implement client side sharding or have finer grain control of how connections are managed.

>>> pool = redis.ConnectionPool(host='192.168.56.100', port=6379)
>>> conn = redis.Redis(connection_pool=pool)
>>> conn.set('hello','world')
True
>>> conn.get('hello')
b'world'

Using socket connections

>>> r = redis.Redis(unix_socket_path='/tmp/redis.sock')

API

redis-py provides an API for manipulating Redis

String API

set(name, value, ex=None, px=None, nx=False, xx=False)

Parameter   Description
ex          Expiration time in seconds
px          Expiration time in milliseconds
nx          If True, the set operation is executed only if name does not exist
xx          If True, the set operation is executed only if name already exists
>>> conn.set('k1', 'v1', ex=10, nx=True)
True
>>> conn.get('k1')
b'v1'
>>> conn.get('k1')

setex(name, value, time)

Set expiration time/seconds

>>> conn.setex('k','v',1)
True
>>> conn.get('k')

psetex(name, time_ms, value)

Set expiration time/milliseconds

>>> conn.psetex('k',10,'v')
True
>>> conn.get('k')

setnx(name, value)

Set the value only if the key does not already exist

>>> conn.get('k1')
>>> conn.setnx('k1','v1')
True
>>> conn.get('k1')
b'v1'
>>> conn.setnx('k2','v2')
False

mset(*args, **kwargs)

Setting multiple keys/values at the same time

>>> conn.mset(k1='v1', k2='v2')
True
>>> conn.mset({'k1': 'v1', 'k2': 'v2'})
True

get(name)

Get a single value

>>> conn.get('k1')
b'v1'

mget(keys, *args)

Get multiple values

>>> conn.mget('k1','k2')
[b'v1', b'v2']
# Input list
>>> conn.mget(['name','url'])
[b'ansheng', b'https://blog.ansheng.me']

getset(name, value)

Set a new value and get the original value

>>> conn.set('hello', 'world')
True
>>> result = conn.getset('hello', 'Linux')
>>> result
b'world'
>>> conn.get('hello')
b'Linux'

getrange(key, start, end)

Get a substring of the value by index range (start and end are inclusive)

>>> conn.set('key','value')
True
>>> conn.getrange('key', 1, 4)
b'alue'

setrange(name, offset, value)

Modify value according to index

>>> conn.set('n','123456789')
True
>>> conn.setrange('n', 0, 'a')
9
>>> conn.get('n')
b'a23456789'

setbit(name, offset, value)

Set the bit at offset in the value of name to 0 or 1 (a combined sketch with getbit follows the getbit example below).

getbit(name, offset)

Get the bit (0 or 1) at offset in the value of name

>>> conn.getbit('k',1)
1
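
A fuller sketch of setbit and getbit together, assuming an example key 'bits' that does not yet exist (setbit returns the previous value of the bit):

>>> conn.setbit('bits', 7, 1)
0
>>> conn.get('bits')
b'\x01'
>>> conn.getbit('bits', 7)
1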

bitcount(key, start=None, end=None)

Return the number of bits set to 1 in the binary representation of the key's value; start and end optionally limit the count to a byte range.
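
A minimal sketch, using the 'foobar' value from the Redis documentation (without a range the whole string is counted; start and end are byte offsets):

>>> conn.set('foo', 'foobar')
True
>>> conn.bitcount('foo')
26
>>> conn.bitcount('foo', 0, 0)
4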

bitop(operation, dest, *keys)

Perform a bitwise operation on the values of multiple keys and save the result in the new key dest.

>>> conn.mset(n1='abc',n2='cde',n3='adc')
True
>>> conn.bitop('AND','now_key','n1','n2','n3')
3
>>> conn.get('now_key')
b'a`a'
>>> conn.mget('n1','n2','n3')
[b'abc', b'cde', b'adc']

The operation parameter supports AND, OR, NOT and XOR.

strlen(name)

Get the length of value

>>> conn.set('name','Ansheng')
True
>>> conn.strlen('name')
7

incr(name, amount=1)

Increment the value of name by amount; if name does not exist, it is created (starting from 0)

>>> conn.get('number')
>>> conn.incr('number')
1
>>> conn.get('number')
b'1'
>>> conn.incr('number')
2
>>> conn.incr('number', 10)
12

incrbyfloat(name, amount=1.0)

Same as above, but supports floating-point increments

>>> conn.incrbyfloat('number', 1.5)
13.5
>>> conn.incrbyfloat('number', 1.1)
14.6

decr(name, amount=1)

Decrement the value of name; as with incr, an error is raised if the stored value is not an integer.

>>> conn.set('n', 10)
True
>>> conn.decr('n')
9
>>> conn.decr('n', 9)
0

append(key, value)

Append value to the existing value of key and return the new length

>>> conn.set('blog','https://blog.ansheng.me')
True
>>> conn.append('blog','/')
24
>>> conn.get('blog')
b'https://blog.ansheng.me/'

Hash API

hset(name, key, value)

Set a key/value pair in the hash name; the key is updated if it exists and created if it does not.

>>> conn.hset('dic','k1','v1')
1
>>> conn.hget('dic','k1')
b'v1'

hmset(name, mapping)

Set multiple key/value pairs in the hash name at the same time

>>> conn.hmset('dic', {'k1': 'v1', 'k2': 'v2'})
True
>>> conn.hget('dic','k2')
b'v2'

hget(name, key)

Get the value of key in name

>>> conn.hget('dic','k2')
b'v2'

hmget(name, keys, *args)

Getting multiple at the same time

>>> conn.hmget('dic',['k1', 'k2'])
[b'v1', b'v2']
>>> conn.hmget('dic','k1', 'k2')
[b'v1', b'v2']

hgetall(name)

Get all key/value corresponding to name

>>> conn.hgetall('dic')
{b'k1': b'v1', b'k2': b'v2'}

hlen(name)

Get the number of key-value pairs corresponding to name

>>> conn.hlen('dic')
2

hkeys(name)

Get all keys in name

>>> conn.hkeys('dic')
[b'k1', b'k2']

hvals(name)

Get all values in name

>>> conn.hvals('dic')
[b'v1', b'v2']

hexists(name, key)

Check whether the given key exists in the hash name

>>> conn.hexists('dic','k1')
True
>>> conn.hexists('dic','kk')
False

hdel(name, *keys)

Delete the corresponding key in name

>>> conn.hdel('dic','k1')
1
>>> conn.hget('dic','k1')

hincrby(name, key, amount=1)

Increment the value of key in the hash name by amount; if it does not exist, it is created

>>> conn.hincrby('dic','number')
1
>>> conn.hincrby('dic','number',10)
11

hincrbyfloat(name, key, amount=1.0)

Same as above, but supports floating-point increments

>>> conn.hincrbyfloat('dic','float')
1.0
>>> conn.hincrbyfloat('dic','float',0.3)
1.3

hscan(name, cursor=0, match=None, count=None)

Incremental iteration: hscan fetches hash data in batches rather than loading everything at once, which avoids exhausting memory on large hashes (a usage sketch follows the parameter table).

Parameter   Description
name        The name of the Redis key
cursor      Cursor used to fetch the data in batches
match       Pattern to match keys against; the default None matches all keys
count       Minimum number of entries fetched per batch; the default None uses the Redis default
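
A rough sketch against the 'dic' hash built up in the earlier examples (hscan returns a (cursor, data) tuple; a returned cursor of 0 means the iteration is complete, and the contents depend on what the hash currently holds):

>>> conn.hscan('dic')
(0, {b'k2': b'v2', b'number': b'11', b'float': b'1.3'})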

hscan_iter(name, match=None, count=None)

Wrap hscan in a generator (using yield) so that data can be fetched from Redis in batches

Parameter   Description
match       Pattern to match keys against; the default None matches all keys
count       Minimum number of entries fetched per batch; the default None uses the Redis default

Such as:

for item in conn.hscan_iter('xx'):
    print(item)

expire(name, time)

Set expiration time

>>> conn.hset('info','BlogUrl','https://blog.ansheng.me')
1
>>> conn.expire('info', 10)
True
>>> conn.hget('info','BlogUrl')
b'https://blog.ansheng.me'
>>> conn.hget('info','BlogUrl')

List API

lpush(name, *values)

Add values on the leftmost side

>>> conn.lpush('li', 11,22,33)
3
>>> conn.lindex('li', 0)
b'33'

rpush(name, *values)

Add values on the rightmost side

>>> conn.rpush('lli', 11,22,33)
3
>>> conn.lindex('lli', 0)
b'11'

lpushx(name, value)

Add the value to the left of the list, but only if name already exists

>>> conn.lpushx('li', 'aa')
4
>>> conn.lindex('li', 0)
b'aa'

rpushx(name, value)

Add the value to the right of the list, but only if name already exists

>>> conn.rpushx('li', 'bb')
5
>>> conn.lindex('li', 0)
b'aa'
>>> conn.lindex('li', 4)
b'bb'

llen(name)

Get the number of name elements

>>> conn.llen('li')
5

linsert(name, where, refvalue, value)

Insert a new value before or after refvalue in the list name

>>> conn.linsert('li','AFTER','11','cc')
6
>>> conn.lindex('li', 3)
b'11'
>>> conn.lindex('li', 4)
b'cc'

lset(name, index, value)

Reassign the value of index position in name

>>> conn.lindex('li', 4)
b'cc'
>>> conn.lset('li', 4, 'hello')
True
>>> conn.lindex('li', 4)
b'hello'

lrem(name, value, num=0)

Remove occurrences of value from the list name:

  1. num=2: remove 2 occurrences, scanning from head to tail;
  2. num=-2: remove 2 occurrences, scanning from tail to head;
  3. num=0 (the default): remove all occurrences.

>>> conn.llen('li')
6
>>> conn.lrem('li', 'hello')
1
>>> conn.llen('li')
5
>>> conn.lrem('li', '22', num=2)
2
>>> conn.llen('li')
3

lpop(name)

Remove and return the first element on the left of name

>>> conn.lindex('li', 0)
b'11'
>>> conn.lpop('li')
b'11'

rpop(name)

Remove and return the first element on the right of name

>>> conn.rpop('li')
b'33'

lindex(name, index)

Get the value of the corresponding index in name

>>> conn.lindex('li', 0)
b'aa'

lrange(name, start, end)

Get a slice of the list between the start and end indexes

>>> conn.llen('li')
8
>>> conn.lrange('li',0,5)
[b'3', b'23', b'34', b'235', b'2', b'1']

ltrim(name, start, end)

Remove values that are not between start-end indexes from the list corresponding to name

>>> conn.ltrim('li',0,5)
True
>>> conn.llen('li')
6

rpoplpush(src, dst)

Take the rightmost element from the src list and add it to the leftmost position of the dst list

>>> conn.lpush('li1', 1,2,3)
3
>>> conn.lpush('li2', 'a','b','c')
3
>>> conn.rpoplpush('li1','li2')
b'1'

blpop(keys, timeout=0)
brpop(keys, timeout=0)

Remove and return the first element from the left (blpop) or right (brpop) of the first non-empty list in keys, blocking until an element is available or the timeout expires (a blpop sketch follows the timeout note below).

brpoplpush(src, dst, timeout=0)

Remove an element from the right side of the src list and add it to the left side of the dst list

>>> conn.lpush('ll', 'a','b','c')
3
>>> conn.lpush('aa', 'a','b','c')
3
>>> conn.brpoplpush('ll','aa')
b'a'

timeout: the number of seconds to block waiting for data when the src list is empty; 0 means block forever.
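
A small sketch of blpop, assuming an example list 'jobs' (it returns a (list_name, value) tuple, or None once the timeout expires; brpop behaves the same but pops from the right):

>>> conn.lpush('jobs', 'job1')
1
>>> conn.blpop('jobs', timeout=5)
(b'jobs', b'job1')
# The list is now empty, so after one second this returns None
>>> conn.blpop('jobs', timeout=1)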

Custom incremental iteration

Since redis-py does not provide incremental iteration over list elements, looping over all elements of the list name would require:

  1. Getting the entire list corresponding to name
  2. Looping over the list

However, if the list is very large, the first step may exhaust the program's memory, so a custom incremental iterator is needed:

def list_iter(name):
    """
    Custom incremental iteration over a Redis list.
    :param name: name of the list in Redis to iterate over
    :return: yields the list elements one by one
    """
    list_count = conn.llen(name)
    for index in range(list_count):
        yield conn.lindex(name, index)

Use

for item in list_iter('pp'):
    print(item)

SET API

sadd(name, *values)

Add values to the set name; members that already exist are not added again

>>> conn.sadd('s1', 1)
1
>>> conn.sadd('s1', 1)
0

scard(name)

Return the number of elements in the set name

>>> conn.scard('s1')
1

sdiff(keys, *args)

Return the members of the first set that are not in the other sets (set difference)

>>> conn.sdiff('s1','s2')
{b'c', b'v', b'a'}

sdiffstore(dest, keys, *args)

Compute the same difference as above and save the result in the set dest

>>> conn.sdiffstore('news','s1','s2')
3
>>> conn.scard('news')
3

sinter(keys, *args)

Get the intersection of the given sets

>>> conn.sinter('s1','s2')
{b'2', b'3', b'1'}

sinterstore(dest, keys, *args)

Compute the intersection of the given sets and save the result in dest

>>> conn.sinterstore('news1','s1','s2')
3

sismember(name, value)

Check whether value is a member of the set name

>>> conn.sismember('news1','1')
True
>>> conn.sismember('news1','aa1')
False

smembers(name)

Get all the members in the name collection

>>> conn.smembers('news1')
{b'2', b'3', b'1'}

smove(src, dst, value)

Move value in src to dst

>>> conn.smembers('s1')
{b'c', b'2', b'v', b'1', b'3', b'a'}
>>> conn.smembers('s2')
{b'2', b'3', b'1'}
>>> conn.smove('s1','s2','v')
True
>>> conn.smembers('s1')
{b'c', b'2', b'a', b'3', b'1'}
>>> conn.smembers('s2')
{b'2', b'v', b'3', b'1'}

spop(name)

Remove and return a random member of the set name

>>> conn.smembers('s2')
{b'2', b'v', b'3', b'1'}
>>> conn.spop('s2')
b'3'
>>> conn.smembers('s2')
{b'2', b'v', b'1'}
>>> conn.spop('s2')
b'2'
>>> conn.smembers('s2')
{b'v', b'1'}
>>> conn.spop('s2')
b'1'
>>> conn.smembers('s2')
{b'v'}

srandmember(name, number=None)

Get number random members from the set name without removing them; the default is number=1

>>> conn.smembers('s1')
{b'c', b'2', b'a', b'3', b'1'}
>>> conn.srandmember('s1')
b'1'
>>> conn.srandmember('s1')
b'a'
>>> conn.srandmember('s1',number=2)
[b'3', b'a']
>>> conn.srandmember('s1',number=2)
[b'1', b'2']

srem(name, *values)

Remove the given values from the set name

>>> conn.smembers('s1')
{b'c', b'2', b'a', b'3', b'1'}
>>> conn.srem('s1','1','2')
2
>>> conn.smembers('s1')
{b'c', b'a', b'3'}

sunion(keys, *args)

Get the union of the given sets

>>> conn.sadd('a1',1,2,3)
3
>>> conn.sadd('a2',1,2,3,4,5,6,7)
7
>>> conn.sunion('a2','a1')
{b'2', b'7', b'1', b'3', b'6', b'5', b'4'}

sunionstore(dest, keys, *args)

Get the union of the given sets and save the result in dest

>>> conn.sunionstore('a3', 'a2','a1')
7
>>> conn.smembers('a3')
{b'2', b'7', b'1', b'3', b'6', b'5', b'4'}

Ordered set API

zadd(name, *args, **kwargs)

Add members with scores to the sorted set name; the set is created if it does not exist.

>>> conn.zadd('h1','n1',11,'n2',22)
2
>>> conn.zadd('h2',n1=11,n2=22)
2

zcard(name)

Return the number of elements in the sorted set name

>>> conn.zcard('h1')
2

zcount(name, min, max)

Return the number of members in name whose score is between min and max

>>> conn.zcount('h1',10,30)
2

zincrby(name, value, amount=1)

Increment the score of value in the sorted set name by amount

>>> conn.zincrby('h1','n1',10)
21.0

zinterstore(dest, keys, aggregate=None)

Store the intersection of the sorted sets in keys into dest; aggregate (SUM, MIN or MAX) controls how the scores of shared members are combined (a sketch follows below).

zlexcount(name, min, max)

Return the number of members in name whose value lies between min and max when all members share the same score (lexicographic range).
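
A hedged sketch of zinterstore, using the keyword form of zadd shown above and example keys 'za', 'zb' and 'zc' (only members present in both sets are kept, and their scores are summed by default):

>>> conn.zadd('za', x=1, y=2)
2
>>> conn.zadd('zb', y=5, z=3)
2
>>> conn.zinterstore('zc', ['za', 'zb'])
1
>>> conn.zrange('zc', 0, -1, withscores=True)
[(b'y', 7.0)]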

zrange(name, start, end, desc=False, withscores=False, score_cast_func=float)

Parameter         Description
name              The name of the Redis key
start             Start position in the sorted set (an index, not a score)
end               End position in the sorted set (an index, not a score)
desc              Sort order; by default members are returned with scores from low to high
withscores        Whether to return the members' scores as well; by default only the values are returned
score_cast_func   Function used to convert the scores
>>> conn.zrange('h1', 1, 2, desc=True, withscores=True, score_cast_func=float)
[(b'n1', 21.0)]
>>> conn.zrange('h1', 1, 2, desc=False, withscores=True, score_cast_func=float)
[(b'n2', 22.0)]
# Ranking from large to small
zrevrange(name, start, end, withscores=False, score_cast_func=float) 
# Get the elements of the ordered set corresponding to name according to the scoring range
zrangebyscore(name, min, max, start=None, num=None, withscores=False, score_cast_func=float)
# Ranking from large to small
zrevrangebyscore(name, max, min, start=None, num=None, withscores=False, score_cast_func=float)

zrangebylex(name, min, max, start=None, num=None)

When all members of an ordered set have the same score, the elements of the ordered set are sorted according to the value of the members (lexicographical ordering), and this command can return the members whose values are between min and max in a given ordered set key.

Members are compared byte by byte and returned in order from low to high. If two strings share a common prefix, the longer string is treated as greater than the shorter one.

Parameter   Description
min         Left bound (a value): + means positive infinity, - means negative infinity, ( means an open interval, [ means a closed interval
max         Right bound (a value), same syntax as min
start       Slice the result: starting index
num         Slice the result: number of elements returned from start

Such as:

ZADD myzset 0 aa 0 ba 0 ca 0 da 0 ea 0 fa 0 ga
# r.zrangebylex('myzset', "-", "[ca") results: ['aa','ba','ca']

More:

# Rank from high to low
zrevrangebylex(name, max, min, start=None, num=None)

zrangebyscore(name, min, max, start=None, num=None, withscores=False, score_cast_func=float)

zrank(name, value)

Return the zero-based rank of value in the sorted set name

>>> conn.zrank('h1','n1')
0
>>> conn.zrank('h1','n2')
1

zrevrank(name, value) returns the rank with scores ordered from high to low

zrem(name, *values)

Delete the corresponding values in name

>>> conn.zrem('h1','n2')
1
>>> conn.zrem('h2',['n1','n2'])
2

zremrangebyrank(name, min, max)

Delete according to ranking range

>>> conn.zremrangebyrank('h1',1,2)
1

zremrangebyscore(name, min, max)

Delete by Score Range

>>> conn.zremrangebyscore('h1',10,20)
2

zscore(name, value)

Return the score of value in the sorted set name

>>> conn.zscore('h1','n1')
11.0

zunionstore(dest, keys, aggregate=None)

Store the union of the sorted sets in keys into dest; aggregate (SUM, MIN or MAX) controls how the scores of shared members are combined (see the sketch below).
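
A hedged sketch, again with the keyword form of zadd and example keys 'z1', 'z2' and 'z3'; aggregate='MAX' keeps the larger score for members that appear in both sets:

>>> conn.zadd('z1', a=1, b=2)
2
>>> conn.zadd('z2', b=10, c=3)
2
>>> conn.zunionstore('z3', ['z1', 'z2'], aggregate='MAX')
3
>>> conn.zrange('z3', 0, -1, withscores=True)
[(b'a', 1.0), (b'c', 3.0), (b'b', 10.0)]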

Global API

delete(*names)

Delete names in redis

>>> conn.delete('ooo')
1

exists(name)

Check if name exists

>>> conn.exists('iii')
False
>>> conn.exists('h1')
True

keys(pattern='*')

# Match all keys in the database
>>> conn.keys(pattern='*')
[b'h2', b'kasd1', b'n2', b'url', b'name', b'n', b'li1', b'n1', b's1', b'now_key', b'l', b's2', b'number', b'numbers', b'a2', b'dic', b'a1', b'news', b'news1', b'aa', b'key', b'lli', b'll', b'k', b'li', b'k2', b'h1', b'li2', b'ccc', b'k1', b'blog', b'kaasdsd1', b'a3', b'l1', b'l2', b'n3', b'a']
# Match hello, hallo, hxllo, etc.
conn.keys(pattern='h?llo')
# Matching hllo, heeello, etc.
conn.keys(pattern='h*llo')
# Match hello and hallo, but not hillo
conn.keys(pattern='h[ae]llo')

rename(src, dst)

Rename src to dst

>>> conn.set('k','v')
True
>>> conn.get('k')
b'v'
>>> conn.rename('k', 'kk')
True
>>> conn.get('k')
>>> conn.get('kk')
b'v'

move(name, db)

Move name out of the current database into the specified db of the same Redis instance
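
A small sketch, assuming an example key 'tmp' (the key is moved out of the current database, db 0 by default, into database 1):

>>> conn.set('tmp', 'v')
True
>>> conn.move('tmp', 1)
True
>>> conn.exists('tmp')
False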

randomkey()

Return a random key name from Redis without deleting it

>>> conn.randomkey()
b'll'
>>> conn.randomkey()
b'news1'

type(name)

View the type of name

>>> conn.type('kk')
b'string'

Pipeline

By default, redis-py performs one connection operation per request: it takes a connection from the pool and returns it when the request finishes. To send several commands in a single request, use a pipeline; by default a pipeline is executed atomically (similar to a transaction in MySQL).

>>> import redis
>>> pool = redis.ConnectionPool(host='192.168.56.100', port=6379)
>>> r = redis.Redis(connection_pool=pool)
# Create a pipeline that supports transactions
>>> pipe = r.pipeline(transaction=True)
# Queue commands on the pipeline; nothing is sent to Redis until execute() is called
>>> pipe.set('hello', 'world')
>>> pipe.set('blog', 'ansheng.me')
# Both commands are sent and executed together; if either fails, the transaction is not committed
>>> pipe.execute()
[True, True]

publish-subscribe

# monitor.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-

import redis

class RedisHelper:
    def __init__(self):
        self.__conn = redis.Redis(host='192.168.56.100')
        self.chan_sub = 'fm104.5'
        self.chan_pub = 'fm104.5'

    def public(self, msg):
        self.__conn.publish(self.chan_pub, msg)
        return True

    def subscribe(self):
        pub = self.__conn.pubsub()
        pub.subscribe(self.chan_sub)
        pub.parse_response()
        return pub

# subscriber.py Subscriber
#!/usr/bin/env python
# -*- coding:utf-8 -*-

from monitor import RedisHelper

obj = RedisHelper()
redis_sub = obj.subscribe()

while True:
    msg = redis_sub.parse_response()
    print(msg)

# Annuncer.py Publisher
#!/usr/bin/env python
# -*- coding:utf-8 -*-

from monitor import RedisHelper

obj = RedisHelper()
obj.public('hello')

Example

Use Redis to cache a Tornado page

# _*_coding:utf-8 _*_

import tornado.ioloop
import tornado.web
import time
import redis

poll = redis.ConnectionPool(host='192.168.56.100', port=6379)
conn = redis.Redis(connection_pool=poll)

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        CurrentTim = conn.get('CurrentTim')
        if CurrentTim:
            self.write(CurrentTim)
        else:
            CurrentTim = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
            conn.set('CurrentTim', CurrentTim, ex=5)
            self.write(CurrentTim)

settings = {
    "tempalte_path": "template",
}

application = tornado.web.Application([
    (r'/', MainHandler),
], **settings)

if __name__ == "__main__":
    application.listen(9999)
    tornado.ioloop.IOLoop.instance().start()

Redis-based Session Storage

app.py

# _*_coding:utf-8 _*_

import tornado.ioloop
import tornado.web
import RedisToSession

class BaseHandler(tornado.web.RequestHandler):
    def initialize(self):
        self.session = RedisToSession.Session(self)

class MainHandler(BaseHandler):
    def get(self):
        Info = self.session.GetAll()

        self.render("template/index.html", Data=Info)

    def post(self, *args, **kwargs):
        # Get the passed value
        Key = self.get_argument('key')
        Val = self.get_argument('val')
        action = self.get_argument('action')
        if action == 'set':
            # Setting values
            self.session[Key] = Val
        elif action == 'del':
            del self.session[Key]

        # Get all the information
        Info = self.session.GetAll()
        # Return to front-end rendering
        self.render("template/index.html", Data=Info)

settings = {
    "tempalte_path": "template",
    "cookie_secret": "508CE6152CB93994628D3E99934B83CC",
}

application = tornado.web.Application([
    (r'/', MainHandler),
], **settings)

if __name__ == "__main__":
    application.listen(9999)
    tornado.ioloop.IOLoop.instance().start()

template\index.html

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
</head>
<body>

<form action="/" method="post">
    set/del: <input type="text" name="action" value="set"/>
    Key: <input type="text" name="key"/>
    Val: <input type="text" name="val"/>
    <input type="submit" value="Set up"/>
</form>

{{ Data }}

</body>
</html>


RedisToSession.py

# _*_ coding: utf-8 _*_

import redis
import hashlib
import uuid
import json

# Connect to Redis
pool = redis.ConnectionPool(host='192.168.56.100', port=6379)
conn = redis.Redis(connection_pool=pool)

class Session:
    CookieID = 'uc'
    ExpiresTime = 60 * 20

    def __init__(self, handler):
        """
        Create or load the user's session hash in Redis
        :param handler: the current request handler
        """
        self.handler = handler
        # Get the session ID (a random string) from the client's cookie
        SessionID = self.handler.get_secure_cookie(Session.CookieID, None)
        # The session ID exists on the client and also on the server side
        if SessionID and conn.exists(SessionID):
            self.SessionID = SessionID
        else:
            # Generate a new random session ID
            self.SessionID = self.SessionKey()
            # Create the session hash in Redis
            conn.hset(self.SessionID, None, None)

        # Refresh the expiration to 20 minutes on every visit
        conn.expire(self.SessionID, Session.ExpiresTime)

        # Setting up Cookie
        self.handler.set_secure_cookie('uc', self.SessionID)

    def SessionKey(self):
        """
        :return: Generate random strings
        """
        UUID = str(uuid.uuid1()).replace('-', '')
        MD5 = hashlib.md5()
        MD5.update(bytes(UUID, encoding='utf-8'))
        SessionKey = MD5.hexdigest()
        return SessionKey

    def __setitem__(self, key, value):
        """
        :param key: key in the session
        :param value: corresponding value
        """
        conn.hset(self.SessionID, key, value)

    def __getitem__(self, item):
        """
        :param item: key in the session
        :return: the value stored for that key
        """
        # Get the corresponding data
        ResultData = conn.hget(self.SessionID, item)
        return ResultData

    def __delitem__(self, key):
        """
        :param key: key to delete
        """
        conn.hdel(self.SessionID, key)

    def GetAll(self):
        # Get all the information in Session for testing only
        SessionData = conn.hgetall(self.SessionID)
        return SessionData
