Catalog
Process mutex
Multiple processes rush to buy the remaining tickets at the same time
# Concurrent operation, high efficiency, but the processes compete to write the
# same file, so the data is written disorderly (tickets can be oversold).
# The content of the data.json file is {"ticket_num": 1}
import json
import time
from multiprocessing import Process


def search(user):
    """Read data.json and report how many tickets are left."""
    with open('data.json', 'r', encoding='utf-8') as fp:
        info = json.load(fp)
    print(f'user{user}Check the remaining tickets{info.get("ticket_num")}...')


def buy(user):
    """Read the count, pause to simulate latency, then write the decrement.

    The read-sleep-write window is exactly where unsynchronized processes
    overwrite each other's updates.
    """
    with open('data.json', 'r', encoding='utf-8') as fp:
        info = json.load(fp)
    # Simulated network delay — deliberately widens the race window.
    time.sleep(0.1)
    if info['ticket_num'] > 0:
        info['ticket_num'] -= 1
        with open('data.json', 'w', encoding='utf-8') as fp:
            json.dump(info, fp)
        print(f'user{user}Successful ticket grabbing!')
    else:
        print(f'user{user}Ticket failure')


def run(user):
    """Check the remaining tickets, then attempt to buy one."""
    search(user)
    buy(user)


if __name__ == '__main__':
    # Simulation of ticket grabbing by 10 users
    for idx in range(10):
        proc = Process(target=run, args=(f'user{idx}', ))
        proc.start()
Using locks to secure data
# The content of the data.json file is {"ticket_num": 1}
import json
import time
from multiprocessing import Process, Lock


def search(user):
    """Report the remaining ticket count stored in data.json."""
    with open('data.json', 'r', encoding='utf-8') as fp:
        info = json.load(fp)
    print(f'user{user}Check the remaining tickets{info.get("ticket_num")}...')


def buy(user):
    """Decrement the ticket count in data.json if any tickets remain."""
    with open('data.json', 'r', encoding='utf-8') as fp:
        info = json.load(fp)
    # Simulated network delay.
    time.sleep(0.2)
    if info['ticket_num'] > 0:
        info['ticket_num'] -= 1
        with open('data.json', 'w', encoding='utf-8') as fp:
            json.dump(info, fp)
        print(f'user{user}Successful ticket grabbing!')
    else:
        print(f'user{user}Ticket failure')


def run(user, mutex):
    """Search concurrently, but serialize the buy step under the lock."""
    search(user)
    # Lock up / release lock: only one process at a time runs buy(),
    # so the read-modify-write on data.json is safe.
    with mutex:
        buy(user)


if __name__ == '__main__':
    # Call the Lock() class to get a lock object shared by all workers
    mutex = Lock()
    # Simulation of ticket grabbing by 10 users
    for idx in range(10):
        proc = Process(target=run, args=(f'user{idx}', mutex))
        proc.start()
Process mutex:
- Make concurrent serial, sacrifice execution efficiency, and ensure data security
- Use a mutex whenever concurrent processes need to modify the same shared data.
queue
The queue follows first in, first out
Queue: it is equivalent to a queue space in memory, which can store multiple pieces of data; the data comes out in first-in, first-out (FIFO) order.
q.put() add data
q.get() takes data and follows the queue first in first out
q.get_nowait(): get data from the queue without blocking; raises an error if the queue is empty.
q.put_nowait(): add data without blocking; raises an error if the queue is full.
q.full() to see if the queue is full
q.empty() to see if the queue is empty
# The non-blocking put_nowait()/get_nowait() of a multiprocessing Queue raise
# the queue module's Full/Empty exceptions, so import queue to catch them.
import queue
from multiprocessing import Queue


def main():
    """Demonstrate put/get, their non-blocking variants, and full()/empty().

    The non-blocking calls are wrapped in try/except so the demo runs to the
    end instead of crashing at the first queue.Full/queue.Empty.
    """
    # Call queue class, instantiate queue object
    q = Queue(5)  # 5 data in the queue

    # put adds data. If the data in the queue is full, it will jam (block).
    q.put(1)
    print('Enter data 1')
    q.put(2)
    print('Enter data 2')
    q.put(3)
    print('Enter data 3')
    q.put(4)
    print('Enter data 4')
    q.put(5)
    print('Enter data 5')

    # See if the queue is full (NOTE: full()/empty() are only best-effort
    # snapshots for a multiprocessing queue, not guarantees).
    print(q.full())

    # Add data without blocking. If the queue is full, queue.Full is raised.
    try:
        q.put_nowait(6)
    except queue.Full:
        print('put_nowait(6) raised queue.Full')

    # The data obtained by q.get() follows the first in, first out rule
    print(q.get())
    print(q.get())
    print(q.get())
    print(q.get())
    print(q.get())
    # print(q.get())  # would block forever: the queue is now empty

    # Get queue data without blocking; raises queue.Empty when no data is left.
    try:
        print(q.get_nowait())
    except queue.Empty:
        print('get_nowait() raised queue.Empty')

    # Judge whether the queue is empty
    print(q.empty())

    q.put(6)
    print('Enter data 6')


if __name__ == '__main__':
    main()
Interprocess communication
IPC(Inter-Process Communication)
Data between processes is isolated from each other. If you want to achieve inter process communication, you can use queues.
from multiprocessing import Process, Queue


def task1(q):
    """Producer side of the IPC demo: drop one message into the shared queue."""
    q.put('hello Hello')
    print('Process 1 add data to queue')


def task2(q):
    """Consumer side of the IPC demo: pull the message back out of the queue."""
    print(q.get())
    print('Process 2 get data from queue')


if __name__ == '__main__':
    # Both child processes share the same queue, which is how they communicate.
    chan = Queue()
    workers = [
        Process(target=task1, args=(chan, )),
        Process(target=task2, args=(chan, )),
    ]
    for worker in workers:
        worker.start()
    print('Main process')
Producer and Consumer
In the program, the producer adds data to the queue, and the consumer takes data from the queue.
from multiprocessing import Process, Queue
import time


# Producer
def producer(name, food, q):
    """Make ten (food, i) items and push each one onto the shared queue."""
    for i in range(10):
        item = food, i
        print(f'user{name}Start making{item}')
        q.put(item)
        time.sleep(0.1)


# Consumer
def consumer(name, q):
    """Keep eating items off the queue until a falsy value shows up."""
    while True:
        item = q.get()
        if not item:
            break
        print(f'user{name}Start eating{item}')


if __name__ == '__main__':
    line = Queue()
    makers = [
        Process(target=producer, args=('neo', 'A pancake', line)),
        Process(target=producer, args=('wick', 'Meat bag', line)),
    ]
    eaters = [
        Process(target=consumer, args=('cwz', line)),
        Process(target=consumer, args=('woods', line)),
    ]
    for maker in makers:
        maker.start()
    for eater in eaters:
        # Daemon consumers: they never receive a stop sentinel, so they are
        # killed when the main process exits instead of blocking forever.
        eater.daemon = True
        eater.start()
    print('main')
thread
The concept of thread
Processes and threads are virtual units
Process: resource units
Threads: execution units
When you start a process, there must be a thread. The thread is the real executor.
Start process:
- Create a namespace, and each process will occupy a memory resource.
- Will bring a thread
Open thread:
- One process can start multiple threads
- Threads cost far less than processes
Note: in CPython, the threads of one process cannot run in parallel (because of the GIL) — they can only run concurrently; processes can run in parallel.
Two ways to create threads
from threading import Thread
import time


# Create thread mode 1: pass a target function to Thread
def task():
    """Print a start marker, pause one second, then print an end marker."""
    print('Thread opening')
    time.sleep(1)
    print('Thread end')


if __name__ == '__main__':
    worker = Thread(target=task)
    worker.start()


# Create thread mode 2: subclass Thread and override run()
class MyThread(Thread):
    """Thread subclass whose run() body is the work to execute."""

    def run(self):
        print('Thread opening...')
        time.sleep(1)
        print('Thread end...')


if __name__ == '__main__':
    worker = MyThread()
    worker.start()
Method of thread object
from threading import Thread
from threading import current_thread
import time


def task():
    """Announce this thread's name when it starts and ends, one second apart."""
    print(f'Thread opening{current_thread().name}')
    time.sleep(1)
    print(f'Thread end{current_thread().name}')


if __name__ == '__main__':
    t = Thread(target=task)
    # isAlive() was removed in Python 3.9; is_alive() is the supported name.
    print(t.is_alive())  # False: the thread has not been started yet
    # t.daemon = True
    t.start()
    print(t.is_alive())  # True: the thread is running
Thread mutex
Data is shared between threads
from threading import Thread
from threading import Lock
import time

# Shared state: one lock guarding the shared counter n.
mutex = Lock()
n = 100


def task(i):
    """Decrement the shared counter n by one, holding the lock across the
    read-sleep-write sequence so concurrent threads cannot interleave."""
    print(f'thread{i}start-up')
    global n
    with mutex:
        snapshot = n
        # Deliberate pause: widens the race window the lock protects against.
        time.sleep(0.1)
        n = snapshot - 1
        print(n)


if __name__ == '__main__':
    threads = []
    for idx in range(100):
        worker = Thread(target=task, args=(idx, ))
        threads.append(worker)
        worker.start()
    for worker in threads:
        worker.join()
    # With the lock held per decrement, this reliably prints 0.
    print(n)