Python notes, day 36 (concurrency) | multiprocessing, join(), daemon processes, locks

Keywords: socket JSON

1. Starting a child process

import os
import time
from multiprocessing import Process
def func(args,args2):
    print(args,args2)
    time.sleep(3)
    print('Child process :', os.getpid())
    print('Parent of child process :', os.getppid())
    print(12345)

if __name__ == '__main__':
    p = Process(target=func,args=('parameter','Parameter 2'))   # register
    # p is a process object, and the process has not been started
    p.start()       # Started a subprocess
    print('*'*10)
    print('Parent process :',os.getpid()) # View the process number of the current process
    print('Parent process of parent process :',os.getppid()) # View the parent process of the current process

# The life cycle of a process
    # a main process on its own ends when its own code finishes
    # a child process ends when its own code finishes
    # a main process that has started a child process:
        # if the main process's own code takes longer, it only needs to wait for its own code to finish;
        # if the child process takes longer, the main process finishes its own code and then
        # waits for the child process to finish before it terminates
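
A minimal sketch of that behaviour (slow_child and the sleep length are made up for illustration): the main process finishes its own code almost immediately, but the interpreter only exits once the slower child process has ended.

import os
import time
from multiprocessing import Process

def slow_child(start):
    time.sleep(2)      # the child runs longer than the main process's own code
    print('child %s finished %.1fs after start' % (os.getpid(), time.time() - start))

if __name__ == '__main__':
    start = time.time()
    Process(target=slow_child, args=(start,)).start()
    print('main code finished %.1fs after start' % (time.time() - start))   # ~0.0s
    # the program as a whole still exits only ~2s later, after the child has finished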

2. join() method

# join()
import time
from multiprocessing import Process

def func(arg1,arg2):
    print('*'*arg1)
    time.sleep(5)
    print('*'*arg2)

if __name__ == '__main__':
    p = Process(target=func,args=(10,20))
    p.start()
    print('hahahaha')
    p.join()     # block until the child process ends; from here on the asynchronous code becomes synchronous
    print('====== : finished running')

3. Starting multiple child processes

import os
import time
from multiprocessing import Process

def func(filename,content):
    with open(filename,'w') as f:
        f.write(content*10*'*')

if __name__ == '__main__':
    p_lst = []
    for i in range(10):
        p = Process(target=func, args=('info%s' % i, i))   # each file gets i*10 asterisks
        p_lst.append(p)
        p.start()
    for p in p_lst:
        p.join()   # wait here until every child process has finished before running the code below
    print([i for i in os.walk(r'E:\python10\day37')])

# Synchronous: 500 tasks * 0.1s each = 50s
# Asynchronous: 500 tasks run at the same time, about 0.1s in total
# Multi-process file writing:
#   first write the files into the folder,
#   then show the user all the file names in the folder once the files have been written
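
A rough sketch of that comparison (the task count is reduced from 500 to 20 here, since starting 500 processes is expensive, and the measured times also include process start-up overhead): the same 0.1s task is run first one after another, then in parallel child processes.

import time
from multiprocessing import Process

def task():
    time.sleep(0.1)        # stands in for 0.1s of work

if __name__ == '__main__':
    n = 20

    start = time.time()
    for _ in range(n):
        task()             # synchronous: one task after another
    print('synchronous : %.2fs' % (time.time() - start))

    start = time.time()
    p_lst = [Process(target=task) for _ in range(n)]
    for p in p_lst:
        p.start()          # asynchronous: all the children run at the same time
    for p in p_lst:
        p.join()
    print('asynchronous: %.2fs' % (time.time() - start))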

4. Starting a process with a Process subclass

import os
from multiprocessing import Process

class MyProcess(Process):
    def __init__(self,arg1,arg2):
        super().__init__()
        self.arg1 = arg1
        self.arg2 = arg2

    def run(self):
        print(self.pid)
        print(self.name)
        print(self.arg1)
        print(self.arg2)

if __name__ == '__main__':
    p1 = MyProcess(1,2)
    p1.start()
    p2 = MyProcess(3,4)
    p2.start()

# The custom class inherits from the Process class
# It must implement a run() method; that is the code executed in the child process
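
To check that run() really executes in the child, a short sketch (PidProcess is a made-up class name for this demo): start() creates a new process and calls run() there, whereas calling run() directly just runs the method in the current process.

import os
from multiprocessing import Process

class PidProcess(Process):
    def run(self):
        print('run() executing in pid', os.getpid())

if __name__ == '__main__':
    print('main process pid        ', os.getpid())
    p = PidProcess()
    p.start()      # a child process is created and run() is called inside it
    p.join()
    p.run()        # calling run() directly executes it in the main process instead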

5. Data isolation between processes

# Data between processes is isolated
import os
from multiprocessing import Process

def func():
    global n    # refers to the module-level n, but inside this child process
    n = 0       # rebinds n in the child's own memory space only
    print('pid : %s' % os.getpid(), n)

if __name__ == '__main__':
    n = 100
    p = Process(target=func)
    p.start()
    p.join()
    print(os.getpid(), n)   # still prints 100: the child's change is not visible in the parent

6. Daemon processes

# A daemon process ends as soon as the main process's code has finished executing
# p.terminate() ends the child process p from within the main process
#     ending a process does not take effect immediately after the call; it needs an operating system response
# p.is_alive() checks whether a process is still alive
# p.name / p.pid are the name and process id of the process
# Child process --> daemon process
import time
from multiprocessing import Process

def func():
    while True:
        time.sleep(0.2)
        print('I am Alive')

def func2():
    print('in func2 start')
    time.sleep(8)
    print('in func2 finished')

if __name__ == '__main__':
    p = Process(target=func)
    p.daemon = True   # mark the child as a daemon process (must be set before start())
    p.start()
    p2 = Process(target=func2)
    p2.start()
    p2.terminate()     # End a subprocess
    time.sleep(1)
    print(p2.is_alive())  # Testing whether a process is still alive
    print(p2.name)
    # i = 0
    # while i < 5:
    #     print("I'm the socket server")
    #     time.sleep(1)
    #     i += 1
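
The note above says that terminate() needs an operating system response before it takes effect; a small sketch of that timing (the sleep lengths are arbitrary): is_alive() right after terminate() usually still reports True, and only shortly afterwards does it report False.

import time
from multiprocessing import Process

def worker():
    time.sleep(5)              # stands in for long-running work

if __name__ == '__main__':
    p = Process(target=worker)
    p.start()
    p.terminate()              # ask the operating system to end the child
    print(p.is_alive())        # usually still True: the termination has not been processed yet
    time.sleep(0.5)
    print(p.is_alive())        # False once the child has actually been ended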

7. Locking (Lock)

The file ticket contains: {"ticket": 1}
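
A one-off snippet to create that file before running the example (a starting count of 1 matches the note; any small number works):

import json

# create the shared 'ticket' file that the buying processes read and update
with open('ticket', 'w') as f:
    json.dump({'ticket': 1}, f)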

# lock

# Train tickets
import json
import time
from multiprocessing import Process
from multiprocessing import Lock

# def show(i):
#     with open('ticket') as f:
#         dic = json.load(f)
#     print('remaining tickets: %s' % dic['ticket'])

def buy_ticket(i, lock):
    lock.acquire()            # take the key to the door: only one process past this point at a time
    with open('ticket') as f:
        dic = json.load(f)
        time.sleep(0.1)       # simulate the network delay of reading
    if dic['ticket'] > 0:
        dic['ticket'] -= 1
        print('\033[32m%s bought a ticket.\033[0m' % i)
    else:
        print('\033[31m%s no tickets left.\033[0m' % i)
    time.sleep(0.1)           # simulate the network delay of writing
    with open('ticket', 'w') as f:
        json.dump(dic, f)
    lock.release()            # give the key back so the next process can enter

if __name__ == '__main__':
    # for i in range(10):
    #     p = Process(target=show,args=(i,))
    #     p.start()
    lock = Lock()
    for i in range(10):
        p = Process(target=buy_ticket, args=(i,lock))
        p.start()

8. Using multiprocessing to make a socket server concurrent

Client side

import socket

sk = socket.socket()
sk.connect(('127.0.0.1',8080))
msg = sk.recv(1024).decode('utf-8')
print(msg)
msg2 = input('>>>').encode('utf-8')
sk.send(msg2)
sk.close()

Server side

import socket
from multiprocessing import Process
def serve(conn):
    ret = 'Hello'.encode('utf-8')
    conn.send(ret)
    msg = conn.recv(1024).decode('utf-8')
    print(msg)
    conn.close()

if __name__ == '__main__':
    sk = socket.socket()
    sk.bind(('127.0.0.1',8080))
    sk.listen()
    try:
        while True:
            conn,addr = sk.accept()
            p = Process(target=serve,args=(conn,))
            p.start()
    finally:
        sk.close()
