// (Forum note from the original poster: "求大虾帮忙啊" — "Experts, please help!")
#include<stdio.h>
#include<cuda_runtime.h>
#include<cutil.h>
#include<stdlib.h>
#define DATA_SIZE 256*5// computes the sum of squares of 0 .. 256*5-1
#define THREAD_NUM 256
// Kernel: every one of the THREAD_NUM threads sums the squares of its own
// contiguous slice of [0, DATA_SIZE); thread 0 additionally records how many
// device clock() ticks the whole thing took.  Device clock() counts GPU
// shader-clock cycles — NOT the same unit as the host's CLOCKS_PER_SEC-based
// clock(), so the two timings printed by main() are not directly comparable.
// NOTE(review): the `num` buffer is never read — each square is recomputed
// from the loop index itself; presumably a later tutorial step uses it.
// Assumes DATA_SIZE is an exact multiple of THREAD_NUM.
__global__ static void sumOfSquares(int *num, int* result,
clock_t* time)
{
    const int tid   = threadIdx.x;
    const int chunk = DATA_SIZE / THREAD_NUM;   // elements handled per thread
    const int first = tid * chunk;
    const int last  = first + chunk;            // exclusive upper bound

    clock_t start;
    if(tid == 0) start = clock();               // only thread 0 times the run

    int partial = 0;
    for(int i = first; i < last; ++i)
        partial += i * i;

    result[tid] = partial;                      // one partial sum per thread
    if(tid == 0) *time = clock() - start;
}
// Host driver: fills the input, launches one block of THREAD_NUM threads,
// reduces the per-thread partial sums on the CPU, and compares against a
// plain CPU loop.  Returns 0 on success, 1 on a CUDA allocation failure.
int main()
{
    int *gpudata, *result, data[DATA_SIZE];
    clock_t *time;

    // BUG FIX: `data` was copied to the device uninitialized.  The current
    // kernel happens to ignore `num`, so results were right by accident —
    // initialize it properly so a kernel that reads num[i] also works.
    for(int i = 0; i < DATA_SIZE; i++) data[i] = i;

    // Allocate device buffers, checking each call — CUDA errors are silent
    // unless the return code is inspected.
    cudaError_t err;
    if((err = cudaMalloc((void**)&gpudata, sizeof(int)*DATA_SIZE)) != cudaSuccess ||
       (err = cudaMalloc((void**)&result,  sizeof(int)*THREAD_NUM)) != cudaSuccess ||
       (err = cudaMalloc((void**)&time,    sizeof(clock_t)))        != cudaSuccess) {
        fprintf(stderr, "cudaMalloc failed: %s\n", cudaGetErrorString(err));
        return 1;
    }
    cudaMemcpy(gpudata, data, sizeof(int)*DATA_SIZE, cudaMemcpyHostToDevice);

    // One block, THREAD_NUM threads, no dynamic shared memory.
    sumOfSquares<<<1, THREAD_NUM, 0>>>(gpudata, result, time);
    if((err = cudaGetLastError()) != cudaSuccess) {
        fprintf(stderr, "kernel launch failed: %s\n", cudaGetErrorString(err));
        return 1;
    }

    int sum[THREAD_NUM];
    clock_t time_used;
    // cudaMemcpy on the default stream blocks until the kernel finishes,
    // so no explicit cudaDeviceSynchronize() is needed here.
    cudaMemcpy(sum, result, sizeof(int) * THREAD_NUM,
        cudaMemcpyDeviceToHost);
    cudaMemcpy(&time_used, time, sizeof(clock_t),
        cudaMemcpyDeviceToHost);
    cudaFree(gpudata);
    cudaFree(result);
    cudaFree(time);

    // Final reduction of the THREAD_NUM partial sums on the host.
    long long int final_sum = 0;
    for(int i = 0; i < THREAD_NUM; i++) {
        final_sum += sum[i];
    }
    // BUG FIX: final_sum is long long, so "%d" was undefined behavior — use
    // "%lld"; likewise cast clock_t before printing.  The GPU figure is in
    // device clock() ticks (shader-clock cycles), a different unit from the
    // host clock() value printed below.
    printf("sum: %lld time (GPU clock cycles): %ld\n", final_sum, (long)time_used);

    // CPU reference: same computation, timed with the host clock()
    // (units of 1/CLOCKS_PER_SEC seconds).
    final_sum = 0;
    clock_t time1;
    time1 = clock();
    for(int i = 0; i < DATA_SIZE; i++) {
        final_sum += i*i;
    }
    time1 = clock() - time1;
    printf("sum (CPU): %lld time (CPU clock ticks): %ld\n", final_sum, (long)time1);
    return 0;
}
/*
 * Original poster's question (但问题是结果倒是一样,但时间gpu所运行时间是2342,
 * 而cpu为1,他们的单位是什么,为什么gpu运行时那么多,是不是算错了):
 * "The results agree, but the GPU time is 2342 while the CPU time is 1 —
 * what are the units, and why is the GPU number so large? Is it wrong?"
 *
 * Answer: the two numbers use different units, so they cannot be compared.
 * Device-side clock() counts GPU shader-clock cycles on the SM running the
 * thread, while host clock() counts ticks of 1/CLOCKS_PER_SEC seconds.
 * 2342 GPU cycles is on the order of a microsecond — nothing is miscomputed.
 */