/*
 * Copyright (c) 2018, Cornell University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or
 * without modification, are permitted provided that the following
 * conditions are met:
 *
 * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following
 * disclaimer in the documentation and/or other materials provided
 * with the distribution.
 *
 * Neither the name of Cornell University nor the names of its
 * contributors may be used to endorse or promote products derived
 * from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
 * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
 * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Tuan Ta
 */

#include <pthread.h>

#include <atomic>
#include <cstdlib>
#include <iostream>

//------------------------------------------------------------------------
// Create n threads, run them in parallel and wait for them in the master
// thread.
// Each child thread increments a shared variable m times atomically
//------------------------------------------------------------------------

// Upper bound on the number of worker threads main() tries to spawn.
constexpr int MAX_N_WORKER_THREADS = 10;

// Argument bundle passed to each worker thread through pthread_create().
struct ThreadArg
{
    int nsteps;                    // number of increments this worker performs
    std::atomic<int>* shared_var;  // counter shared by all worker threads
};

// Worker thread entry point.
//
// @param args  pointer to this thread's ThreadArg
// @return      always nullptr (the join result is unused)
//
// Atomically increments the shared counter args->nsteps times.
void* func( void* args )
{
    const ThreadArg* my_args = static_cast<ThreadArg*>( args );

    for ( int i = 0; i < my_args->nsteps; ++i ) {
        my_args->shared_var->fetch_add( 1 );
    }

    return nullptr;
}

int main( int argc, const char* argv[] )
{
    // Thread handles and per-thread arguments: the count is a
    // compile-time constant, so stack arrays replace the original
    // new[]/delete[] pair (no heap allocation, no leak risk).
    pthread_t threads[MAX_N_WORKER_THREADS];
    ThreadArg t_args[MAX_N_WORKER_THREADS];

    // variable shared among all worker threads
    std::atomic<int> shared_var( 0 );

    // number of times each thread increments shared_var
    const int nsteps = 1000;

    // Spawn workers; stop at the first creation failure and remember
    // how many threads actually started so we only join those.
    int n_worker_threads = 0;
    for ( int tid = 0; tid < MAX_N_WORKER_THREADS; ++tid ) {
        t_args[tid].nsteps = nsteps;
        t_args[tid].shared_var = &shared_var;

        if ( pthread_create( &threads[tid], nullptr, func,
                             &t_args[tid] ) != 0 ) {
            break;
        }

        n_worker_threads++;
    }

    // sync up: wait for every successfully created worker
    for ( int tid = 0; tid < n_worker_threads; ++tid ) {
        pthread_join( threads[tid], nullptr );
    }

    // verify: each worker contributed exactly nsteps increments, and at
    // least one worker must have run for the test to be meaningful
    if ( shared_var != n_worker_threads * nsteps || n_worker_threads < 1 )
        return EXIT_FAILURE;

    return EXIT_SUCCESS;
}