Chromium Code Reviews

Unified Diff: base/trace_event/trace_event_unittest.cc

Issue 1852433005: Convert //base to use std::unique_ptr (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: rebase after r384946 (created 4 years, 8 months ago)
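
This CL mechanically replaces base's scoped_ptr with the standard std::unique_ptr, which is why the diff below adds an <memory> include and drops base/memory/scoped_ptr.h. A minimal sketch of the before/after pattern, using a hypothetical Widget type rather than the real test classes:

    #include <memory>   // std::unique_ptr
    #include <utility>  // std::move

    struct Widget {};  // hypothetical stand-in for e.g. ConvertableToTraceFormat

    void Sketch() {
      // Before this CL: scoped_ptr<Widget> w(new Widget);
      // After: the standard smart pointer with the same single-ownership semantics.
      std::unique_ptr<Widget> w(new Widget);

      // Ownership transfer stays explicit via std::move(), as at the
      // TRACE_EVENT* and ReturnChunk() call sites in the diff.
      std::unique_ptr<Widget> w2 = std::move(w);
    }  // w2 destroys the Widget when it goes out of scope

Both types are move-only owners of a heap object, so the conversion is largely a textual rename plus reformatting of lines that no longer fit.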
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

+#include "base/trace_event/trace_event.h"
+
 #include <math.h>
 #include <stddef.h>
 #include <stdint.h>

 #include <cstdlib>
+#include <memory>

 #include "base/bind.h"
 #include "base/command_line.h"
 #include "base/json/json_reader.h"
 #include "base/json/json_writer.h"
 #include "base/location.h"
 #include "base/macros.h"
 #include "base/memory/ref_counted_memory.h"
-#include "base/memory/scoped_ptr.h"
 #include "base/memory/singleton.h"
 #include "base/process/process_handle.h"
 #include "base/single_thread_task_runner.h"
 #include "base/stl_util.h"
 #include "base/strings/pattern.h"
 #include "base/strings/stringprintf.h"
 #include "base/synchronization/waitable_event.h"
 #include "base/threading/platform_thread.h"
 #include "base/threading/thread.h"
 #include "base/time/time.h"
 #include "base/trace_event/trace_buffer.h"
-#include "base/trace_event/trace_event.h"
 #include "base/trace_event/trace_event_synthetic_delay.h"
 #include "base/values.h"
 #include "testing/gmock/include/gmock/gmock.h"
 #include "testing/gtest/include/gtest/gtest.h"

 namespace base {
 namespace trace_event {

 namespace {

(...skipping 135 matching lines...)
   num_flush_callbacks_++;
   if (num_flush_callbacks_ > 1) {
     EXPECT_FALSE(events_str->data().empty());
   }
   AutoLock lock(lock_);
   json_output_.json_output.clear();
   trace_buffer_.Start();
   trace_buffer_.AddFragment(events_str->data());
   trace_buffer_.Finish();

-  scoped_ptr<Value> root = base::JSONReader::Read(
+  std::unique_ptr<Value> root = base::JSONReader::Read(
       json_output_.json_output, JSON_PARSE_RFC | JSON_DETACHABLE_CHILDREN);

   if (!root.get()) {
     LOG(ERROR) << json_output_.json_output;
   }

   ListValue* root_list = NULL;
   ASSERT_TRUE(root.get());
   ASSERT_TRUE(root->GetAsList(&root_list));

   // Move items into our aggregate collection
   while (root_list->GetSize()) {
-    scoped_ptr<Value> item;
+    std::unique_ptr<Value> item;
     root_list->Remove(0, &item);
     trace_parsed_.Append(item.release());
   }

   if (!has_more_events)
     flush_complete_event->Signal();
 }

 static bool CompareJsonValues(const std::string& lhs,
                               const std::string& rhs,
(...skipping 49 matching lines...)
       continue;
     DictionaryValue* dict = static_cast<DictionaryValue*>(value);

     if (IsAllKeyValueInDict(key_values, dict))
       return dict;
   }
   return NULL;
 }

 void TraceEventTestFixture::DropTracedMetadataRecords() {
-  scoped_ptr<ListValue> old_trace_parsed(trace_parsed_.DeepCopy());
+  std::unique_ptr<ListValue> old_trace_parsed(trace_parsed_.DeepCopy());
   size_t old_trace_parsed_size = old_trace_parsed->GetSize();
   trace_parsed_.Clear();

   for (size_t i = 0; i < old_trace_parsed_size; i++) {
     Value* value = NULL;
     old_trace_parsed->Get(i, &value);
     if (!value || value->GetType() != Value::TYPE_DICTIONARY) {
       trace_parsed_.Append(value->DeepCopy());
       continue;
     }
(...skipping 960 matching lines...)
     ~Convertable() override {}
     void AppendAsTraceFormat(std::string* out) const override {
       (*num_calls_)++;
       out->append("\"metadata_value\"");
     }

    private:
     int* num_calls_;
   };

-  scoped_ptr<ConvertableToTraceFormat> conv1(new Convertable(&num_calls));
-  scoped_ptr<Convertable> conv2(new Convertable(&num_calls));
+  std::unique_ptr<ConvertableToTraceFormat> conv1(new Convertable(&num_calls));
+  std::unique_ptr<Convertable> conv2(new Convertable(&num_calls));

   BeginTrace();
   TRACE_EVENT_API_ADD_METADATA_EVENT(
       TraceLog::GetCategoryGroupEnabled("__metadata"), "metadata_event_1",
       "metadata_arg_name", std::move(conv1));
   TRACE_EVENT_API_ADD_METADATA_EVENT(
       TraceLog::GetCategoryGroupEnabled("__metadata"), "metadata_event_2",
       "metadata_arg_name", std::move(conv2));
   // |AppendAsTraceFormat| should only be called on flush, not when the event
   // is added.
(...skipping 785 matching lines...)
   }

  private:
   DISALLOW_COPY_AND_ASSIGN(MyData);
 };

 TEST_F(TraceEventTestFixture, ConvertableTypes) {
   TraceLog::GetInstance()->SetEnabled(TraceConfig(kRecordAllCategoryFilter, ""),
                                       TraceLog::RECORDING_MODE);

-  scoped_ptr<ConvertableToTraceFormat> data(new MyData());
-  scoped_ptr<ConvertableToTraceFormat> data1(new MyData());
-  scoped_ptr<ConvertableToTraceFormat> data2(new MyData());
+  std::unique_ptr<ConvertableToTraceFormat> data(new MyData());
+  std::unique_ptr<ConvertableToTraceFormat> data1(new MyData());
+  std::unique_ptr<ConvertableToTraceFormat> data2(new MyData());
   TRACE_EVENT1("foo", "bar", "data", std::move(data));
   TRACE_EVENT2("foo", "baz", "data1", std::move(data1), "data2",
                std::move(data2));

-  // Check that scoped_ptr<DerivedClassOfConvertable> are properly treated as
+  // Check that std::unique_ptr<DerivedClassOfConvertable> are properly treated
+  // as
   // convertable and not accidentally casted to bool.
-  scoped_ptr<MyData> convertData1(new MyData());
-  scoped_ptr<MyData> convertData2(new MyData());
-  scoped_ptr<MyData> convertData3(new MyData());
-  scoped_ptr<MyData> convertData4(new MyData());
+  std::unique_ptr<MyData> convertData1(new MyData());
+  std::unique_ptr<MyData> convertData2(new MyData());
+  std::unique_ptr<MyData> convertData3(new MyData());
+  std::unique_ptr<MyData> convertData4(new MyData());
   TRACE_EVENT2("foo", "string_first", "str", "string value 1", "convert",
                std::move(convertData1));
   TRACE_EVENT2("foo", "string_second", "convert", std::move(convertData2),
                "str", "string value 2");
   TRACE_EVENT2("foo", "both_conv", "convert1", std::move(convertData3),
                "convert2", std::move(convertData4));
   EndTraceAndFlush();

   // One arg version.
   DictionaryValue* dict = FindNamePhase("bar", "X");
(...skipping 599 matching lines...)
   TraceLog::GetInstance()->SetEnabled(
       TraceConfig(kRecordAllCategoryFilter, RECORD_CONTINUOUSLY),
       TraceLog::RECORDING_MODE);
   TraceBuffer* buffer = TraceLog::GetInstance()->trace_buffer();
   size_t capacity = buffer->Capacity();
   size_t num_chunks = capacity / TraceBufferChunk::kTraceBufferChunkSize;
   uint32_t last_seq = 0;
   size_t chunk_index;
   EXPECT_EQ(0u, buffer->Size());

-  scoped_ptr<TraceBufferChunk*[]> chunks(new TraceBufferChunk*[num_chunks]);
+  std::unique_ptr<TraceBufferChunk* []> chunks(
+      new TraceBufferChunk*[num_chunks]);
   for (size_t i = 0; i < num_chunks; ++i) {
     chunks[i] = buffer->GetChunk(&chunk_index).release();
     EXPECT_TRUE(chunks[i]);
     EXPECT_EQ(i, chunk_index);
     EXPECT_GT(chunks[i]->seq(), last_seq);
     EXPECT_EQ((i + 1) * TraceBufferChunk::kTraceBufferChunkSize,
               buffer->Size());
     last_seq = chunks[i]->seq();
   }

   // Ring buffer is never full.
   EXPECT_FALSE(buffer->IsFull());

   // Return all chunks in original order.
   for (size_t i = 0; i < num_chunks; ++i)
-    buffer->ReturnChunk(i, scoped_ptr<TraceBufferChunk>(chunks[i]));
+    buffer->ReturnChunk(i, std::unique_ptr<TraceBufferChunk>(chunks[i]));

   // Should recycle the chunks in the returned order.
   for (size_t i = 0; i < num_chunks; ++i) {
     chunks[i] = buffer->GetChunk(&chunk_index).release();
     EXPECT_TRUE(chunks[i]);
     EXPECT_EQ(i, chunk_index);
     EXPECT_GT(chunks[i]->seq(), last_seq);
     last_seq = chunks[i]->seq();
   }

   // Return all chunks in reverse order.
   for (size_t i = 0; i < num_chunks; ++i) {
-    buffer->ReturnChunk(
-        num_chunks - i - 1,
-        scoped_ptr<TraceBufferChunk>(chunks[num_chunks - i - 1]));
+    buffer->ReturnChunk(num_chunks - i - 1, std::unique_ptr<TraceBufferChunk>(
+                                                chunks[num_chunks - i - 1]));
   }

   // Should recycle the chunks in the returned order.
   for (size_t i = 0; i < num_chunks; ++i) {
     chunks[i] = buffer->GetChunk(&chunk_index).release();
     EXPECT_TRUE(chunks[i]);
     EXPECT_EQ(num_chunks - i - 1, chunk_index);
     EXPECT_GT(chunks[i]->seq(), last_seq);
     last_seq = chunks[i]->seq();
   }

   for (size_t i = 0; i < num_chunks; ++i)
-    buffer->ReturnChunk(i, scoped_ptr<TraceBufferChunk>(chunks[i]));
+    buffer->ReturnChunk(i, std::unique_ptr<TraceBufferChunk>(chunks[i]));

   TraceLog::GetInstance()->SetDisabled();
 }

 TEST_F(TraceEventTestFixture, TraceBufferRingBufferHalfIteration) {
   TraceLog::GetInstance()->SetEnabled(
       TraceConfig(kRecordAllCategoryFilter, RECORD_CONTINUOUSLY),
       TraceLog::RECORDING_MODE);
   TraceBuffer* buffer = TraceLog::GetInstance()->trace_buffer();
   size_t capacity = buffer->Capacity();
   size_t num_chunks = capacity / TraceBufferChunk::kTraceBufferChunkSize;
   size_t chunk_index;
   EXPECT_EQ(0u, buffer->Size());
   EXPECT_FALSE(buffer->NextChunk());

   size_t half_chunks = num_chunks / 2;
-  scoped_ptr<TraceBufferChunk*[]> chunks(new TraceBufferChunk*[half_chunks]);
+  std::unique_ptr<TraceBufferChunk* []> chunks(
+      new TraceBufferChunk*[half_chunks]);

   for (size_t i = 0; i < half_chunks; ++i) {
     chunks[i] = buffer->GetChunk(&chunk_index).release();
     EXPECT_TRUE(chunks[i]);
     EXPECT_EQ(i, chunk_index);
   }
   for (size_t i = 0; i < half_chunks; ++i)
-    buffer->ReturnChunk(i, scoped_ptr<TraceBufferChunk>(chunks[i]));
+    buffer->ReturnChunk(i, std::unique_ptr<TraceBufferChunk>(chunks[i]));

   for (size_t i = 0; i < half_chunks; ++i)
     EXPECT_EQ(chunks[i], buffer->NextChunk());
   EXPECT_FALSE(buffer->NextChunk());
   TraceLog::GetInstance()->SetDisabled();
 }

 TEST_F(TraceEventTestFixture, TraceBufferRingBufferFullIteration) {
   TraceLog::GetInstance()->SetEnabled(
       TraceConfig(kRecordAllCategoryFilter, RECORD_CONTINUOUSLY),
       TraceLog::RECORDING_MODE);
   TraceBuffer* buffer = TraceLog::GetInstance()->trace_buffer();
   size_t capacity = buffer->Capacity();
   size_t num_chunks = capacity / TraceBufferChunk::kTraceBufferChunkSize;
   size_t chunk_index;
   EXPECT_EQ(0u, buffer->Size());
   EXPECT_FALSE(buffer->NextChunk());

-  scoped_ptr<TraceBufferChunk*[]> chunks(new TraceBufferChunk*[num_chunks]);
+  std::unique_ptr<TraceBufferChunk* []> chunks(
+      new TraceBufferChunk*[num_chunks]);

   for (size_t i = 0; i < num_chunks; ++i) {
     chunks[i] = buffer->GetChunk(&chunk_index).release();
     EXPECT_TRUE(chunks[i]);
     EXPECT_EQ(i, chunk_index);
   }
   for (size_t i = 0; i < num_chunks; ++i)
-    buffer->ReturnChunk(i, scoped_ptr<TraceBufferChunk>(chunks[i]));
+    buffer->ReturnChunk(i, std::unique_ptr<TraceBufferChunk>(chunks[i]));

   for (size_t i = 0; i < num_chunks; ++i)
     EXPECT_TRUE(chunks[i] == buffer->NextChunk());
   EXPECT_FALSE(buffer->NextChunk());
   TraceLog::GetInstance()->SetDisabled();
 }

 TEST_F(TraceEventTestFixture, TraceRecordAsMuchAsPossibleMode) {
   TraceLog::GetInstance()->SetEnabled(
       TraceConfig(kRecordAllCategoryFilter, RECORD_AS_MUCH_AS_POSSIBLE),
(...skipping 284 matching lines...)
 }

 TEST_F(TraceEventTestFixture, SyntheticDelayConfigurationToString) {
   const char filter[] = "DELAY(test.Delay;16;oneshot)";
   TraceConfig config(filter, "");
   EXPECT_EQ(filter, config.ToCategoryFilterString());
 }

 }  // namespace trace_event
 }  // namespace base
